diff --git a/app/api/manual-cache/route.ts b/app/api/manual-cache/route.ts
new file mode 100644
index 000000000..c9afeb89b
--- /dev/null
+++ b/app/api/manual-cache/route.ts
@@ -0,0 +1,49 @@
+import { NextResponse } from 'next/server';
+import { promises as fs } from 'fs';
+import path from 'path';
+
+// Note: Storing in /tmp to work around serverless readonly filesystems.
+// However, since serverless instances are ephemeral, ideally you'd use Redis or Vercel KV.
+// We are storing in /tmp and verifying hash structure to prevent Path Traversal.
+// WARNING(review): this endpoint is unauthenticated — anyone who can reach it
+// can seed cache entries (cache poisoning). Gate it behind auth before production.
+
+// Cap stored payload size so a client cannot fill /tmp (denial of service).
+const MAX_RESPONSE_BYTES = 1_000_000;
+
+export async function POST(req: Request) {
+  try {
+    const { hash, response } = await req.json();
+
+    // Require both fields to be non-empty strings; writing a non-string body
+    // would otherwise throw an opaque TypeError and surface as a 500, not a 400.
+    if (typeof hash !== 'string' || typeof response !== 'string' || !hash || !response) {
+      return NextResponse.json({ error: 'Missing hash or response' }, { status: 400 });
+    }
+
+    // Validate hash to be strictly 32 alphanumeric hex chars to prevent path traversal
+    if (!/^[a-fA-F0-9]{32}$/.test(hash)) {
+      return NextResponse.json({ error: 'Invalid hash format' }, { status: 400 });
+    }
+
+    if (Buffer.byteLength(response, 'utf-8') > MAX_RESPONSE_BYTES) {
+      return NextResponse.json({ error: 'Response too large' }, { status: 413 });
+    }
+
+    // Since serverless is readonly outside /tmp, use /tmp.
+    // mkdir({ recursive: true }) is a no-op when the directory already exists, so
+    // the existsSync pre-check (a TOCTOU race) is unnecessary; async fs also
+    // avoids blocking the event loop on a warm serverless instance.
+    const CACHE_DIR = path.join('/tmp', '.openmaic', 'manual_cache');
+    await fs.mkdir(CACHE_DIR, { recursive: true });
+
+    await fs.writeFile(path.join(CACHE_DIR, `${hash}.json`), response, 'utf-8');
+
+    return NextResponse.json({ success: true });
+  } catch (error) {
+    return NextResponse.json(
+      { error: error instanceof Error ? error.message : 'Unknown error' },
+      { status: 500 },
+    );
+  }
+}
diff --git a/app/api/web-search/route.ts b/app/api/web-search/route.ts
index 5a9708260..f96a225cd 100644
--- a/app/api/web-search/route.ts
+++ b/app/api/web-search/route.ts
@@ -8,6 +8,7 @@
 import { NextRequest } from 'next/server';
 import { callLLM } from '@/lib/ai/llm';
 import { searchWithTavily, formatSearchResultsAsContext } from '@/lib/web-search/tavily';
+import { searchWithSearXNG } from '@/lib/web-search/searxng';
 import { resolveWebSearchApiKey } from '@/lib/server/provider-config';
 import { createLogger } from '@/lib/logger';
 import { apiError, apiSuccess } from '@/lib/server/api-response';
@@ -28,10 +29,14 @@
       query: requestQuery,
       pdfText,
       apiKey: clientApiKey,
+      providerId,
+      baseUrl,
     } = body as {
       query?: string;
       pdfText?: string;
       apiKey?: string;
+      providerId?: string;
+      baseUrl?: string;
     };
 
     query = requestQuery;
@@ -39,13 +44,25 @@
       return apiError('MISSING_REQUIRED_FIELD', 400, 'query is required');
     }
 
-    const apiKey = resolveWebSearchApiKey(clientApiKey);
-    if (!apiKey) {
-      return apiError(
-        'MISSING_API_KEY',
-        400,
-        'Tavily API key is not configured. Set it in Settings → Web Search or set TAVILY_API_KEY env var.',
-      );
+    let result;
+    if (providerId === 'searxng') {
+      // SECURITY(review): baseUrl is client-controlled and fetched server-side,
+      // which is an SSRF vector (internal services, cloud metadata endpoints).
+      // Prefer the server-side SEARXNG_URL env var, or allow-list this value.
+      result = await searchWithSearXNG({
+        query: query.trim(),
+        baseUrl: baseUrl || process.env.SEARXNG_URL || 'http://127.0.0.1:8080/search'
+      });
+    } else {
+      const apiKey = resolveWebSearchApiKey(clientApiKey);
+      if (!apiKey) {
+        return apiError(
+          'MISSING_API_KEY',
+          400,
+          'Tavily API key is not configured. Set it in Settings → Web Search or set TAVILY_API_KEY env var.',
+        );
+      }
+      result = await searchWithTavily({ query: query.trim(), apiKey });
     }
 
     // Clamp rewrite input at the route boundary; framework body limits still apply to total request size.
diff --git a/components/generation/generating-progress.tsx b/components/generation/generating-progress.tsx index 639e79d31..17ca7f256 100644 --- a/components/generation/generating-progress.tsx +++ b/components/generation/generating-progress.tsx @@ -2,8 +2,11 @@ import { useEffect, useState } from 'react'; import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card'; -import { Loader2, CheckCircle2, XCircle, Circle } from 'lucide-react'; +import { Loader2, CheckCircle2, XCircle, Circle, Copy, Play } from 'lucide-react'; import { useI18n } from '@/lib/hooks/use-i18n'; +import { Button } from '@/components/ui/button'; +import { Textarea } from '@/components/ui/textarea'; +import { toast } from 'sonner'; interface GeneratingProgressProps { outlineReady: boolean; // Is outline generation complete? @@ -62,6 +65,14 @@ export function GeneratingProgress({ }: GeneratingProgressProps) { const { t } = useI18n(); const [dots, setDots] = useState(''); + const [manualResponse, setManualResponse] = useState(''); + const [isSubmitting, setIsSubmitting] = useState(false); + + // Extract hash and prompt + const isManualIntervention = error?.startsWith('MANUAL_INTERVENTION_REQUIRED|||'); + const errorParts = isManualIntervention ? 
error?.split('|||') : []; + const promptHash = (errorParts && errorParts[1]) || ''; + const manualPromptText = (errorParts && errorParts[2]) || ''; // Animated dots for loading state useEffect(() => { @@ -73,12 +84,41 @@ export function GeneratingProgress({ } }, [error, firstPageReady]); + const handleCopyPrompt = () => { + if (manualPromptText) { + navigator.clipboard.writeText(manualPromptText); + toast.success("Prompt copied to clipboard"); + } + }; + + const handleSubmitManualResponse = async () => { + setIsSubmitting(true); + try { + await fetch('/api/manual-cache', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ hash: promptHash, response: manualResponse }) + }); + + // Reload the page. The user will click "Generate" again, + // but the backend will instantly skip the step using the cache! + toast.success("Saved! Please restart the generation."); + window.location.reload(); + } catch (_e) { + toast.error("Failed to save response."); + } finally { + setIsSubmitting(false); + } + }; + return (
- {error ? ( + {isManualIntervention ? ( + <> Action Required: Gemini Blocked Output + ) : error ? ( <> {t('generation.generationFailed')} @@ -98,40 +138,67 @@ export function GeneratingProgress({ - {/* Two milestone status items */} -
- - -
+ {isManualIntervention ? ( +
+

+ The API blocked this specific prompt. Copy the text, paste it into the Gemini Web App, and paste the JSON result here. +

+
+