1 change: 1 addition & 0 deletions .nvmrc
@@ -0,0 +1 @@
20
13 changes: 13 additions & 0 deletions README.md
@@ -38,3 +38,16 @@ Open [http://localhost:3000](http://localhost:3000)
## License

MIT

## AI Provider Configuration

Add the following variables to your env file (e.g. `.env.local`):

```env
# Default provider (OpenAI)
AI_PROVIDER=openai
OPENAI_API_KEY=your_openai_api_key
OPENAI_MODEL=gpt-4o-mini
OPENAI_BASE_URL=https://api.openai.com/v1

# Alternative providers (optional)
ANTHROPIC_API_KEY=your_anthropic_api_key
GEMINI_API_KEY=your_gemini_api_key
GROQ_API_KEY=your_groq_api_key
```

Note that the chat API currently supports only the `openai` provider; setting `AI_PROVIDER` to any other value makes `/api/ai/chat` return HTTP 400.
32 changes: 32 additions & 0 deletions app/.github/workflows/ci.yml
@@ -0,0 +1,32 @@
name: CI

on:
  push:
    branches: [main]
  pull_request:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          # Use the version pinned in .nvmrc (currently 20) so CI matches local dev
          node-version-file: '.nvmrc'
      - run: npm ci
      - run: npm run lint
      - run: npm run build --if-present
      - run: npm run typecheck --if-present

  vercel-preview:
    needs: build
    runs-on: ubuntu-latest
    # The `secrets` context is not available in a job-level `if`, so the
    # presence of the Vercel secrets is exposed through a job-level env flag
    # and checked on the deploy step instead.
    env:
      HAS_VERCEL_SECRETS: ${{ secrets.VERCEL_TOKEN != '' && secrets.VERCEL_ORG_ID != '' && secrets.VERCEL_PROJECT_ID != '' }}
    steps:
      - uses: actions/checkout@v3
      - uses: amondnet/vercel-action@v25
        if: env.HAS_VERCEL_SECRETS == 'true'
        with:
          vercel-token: ${{ secrets.VERCEL_TOKEN }}
          vercel-org-id: ${{ secrets.VERCEL_ORG_ID }}
          vercel-project-id: ${{ secrets.VERCEL_PROJECT_ID }}
          vercel-args: '--prod=false'
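The preview deploy only runs when `VERCEL_TOKEN`, `VERCEL_ORG_ID`, and `VERCEL_PROJECT_ID` are configured as Actions secrets for the repository; without them the deploy step is skipped while the `build` job still gates the PR.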
98 changes: 98 additions & 0 deletions app/api/ai/chat/route.ts
@@ -0,0 +1,98 @@
import { NextRequest, NextResponse } from 'next/server';
import { streamChat, ChatMessage } from '@/lib/ai/openai';

// Opt into the Edge runtime. This allows streaming responses with low
// latency and keeps dependencies out of the Node.js layer.
export const runtime = 'edge';

/**
* POST /api/ai/chat
*
* Accepts a JSON body containing a list of chat messages and optional model
* configuration. Invokes the OpenAI chat completion API and streams the
* assistant's response back as raw text. If another AI provider is
* configured via AI_PROVIDER, a 400 will be returned.
*/
export async function POST(req: NextRequest) {
try {
const { messages, model, temperature } = await req.json();

// Basic validation
if (!Array.isArray(messages)) {
return NextResponse.json({ success: false, error: 'messages must be an array' }, { status: 400 });
}

// Only support openai provider for now
const provider = process.env.AI_PROVIDER || 'openai';
if (provider !== 'openai') {
return NextResponse.json({ success: false, error: `Unsupported AI provider: ${provider}` }, { status: 400 });
}

// Call OpenAI and forward the response
const response = await streamChat({
messages: messages as ChatMessage[],
model,
temperature,
});

if (!response.ok || !response.body) {
let errorMessage: string;
try {
const data = await response.json();
errorMessage = data?.error?.message || response.statusText;
} catch {
errorMessage = response.statusText;
}
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status });
}

// Transform OpenAI's SSE stream into raw text
const encoder = new TextEncoder();
const openaiStream = response.body;
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
const reader = openaiStream!.getReader();
const decoder = new TextDecoder('utf-8');
let buffer = '';
const push = (text: string) => {
controller.enqueue(encoder.encode(text));
};
while (true) {
const { value, done } = await reader.read();
if (done) break;
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split('\n');
buffer = lines.pop() ?? '';
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed.startsWith('data:')) continue;
const payload = trimmed.replace(/^data:\s*/, '');
if (payload === '[DONE]') {
controller.close();
return;
}
try {
const parsed = JSON.parse(payload);
const delta: string = parsed.choices?.[0]?.delta?.content ?? '';
if (delta) {
push(delta);
}
} catch {
// Skip malformed lines
}
}
}
controller.close();
},
});

return new Response(stream, {
headers: {
'Content-Type': 'text/plain; charset=utf-8',
},
});
} catch (err) {
console.error('[api/ai/chat] Error:', err);
return NextResponse.json({ success: false, error: (err as Error)?.message || 'Internal error' }, { status: 500 });
}
}
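For reference, here is a minimal client-side sketch of consuming this endpoint's plain-text stream. The URL, request body, and JSON error shape follow the route above; the `ChatMessage` shape is assumed from typical OpenAI-style message objects, and the rest is illustrative:

```ts
// Minimal sketch: stream the assistant's reply from /api/ai/chat.
// Runs in the browser or any runtime with streaming fetch support.
type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string };

async function chat(messages: ChatMessage[], onChunk: (text: string) => void) {
  const res = await fetch('/api/ai/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages }),
  });
  if (!res.ok || !res.body) {
    // Error responses from the route are JSON: { success: false, error: string }
    const { error } = await res.json().catch(() => ({ error: res.statusText }));
    throw new Error(error);
  }
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let full = '';
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    const chunk = decoder.decode(value, { stream: true });
    full += chunk;
    onChunk(chunk); // e.g. append to the visible transcript as it arrives
  }
  return full;
}
```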
75 changes: 75 additions & 0 deletions app/api/health/route.ts
@@ -0,0 +1,75 @@
// app/api/health/route.ts
import { NextResponse } from "next/server";

export const dynamic = "force-dynamic";

type Check = { ok: boolean; message?: string };

async function checkOpenAI(): Promise<Check> {
try {
const key = process.env.OPENAI_API_KEY;
if (!key) return { ok: false, message: "Missing OPENAI_API_KEY" };

// Lightweight "are you alive?" request
const r = await fetch("https://api.openai.com/v1/models", {
headers: { Authorization: `Bearer ${key}` },
cache: "no-store",
});

if (!r.ok) return { ok: false, message: `HTTP ${r.status}` };
return { ok: true };
} catch (err: any) {
return { ok: false, message: err?.message || "OpenAI check failed" };
}
}

async function checkSupabase(): Promise<Check> {
try {
const url = process.env.SUPABASE_URL;
const anon = process.env.SUPABASE_ANON_KEY;
if (!url || !anon)
return { ok: false, message: "Missing SUPABASE_URL or SUPABASE_ANON_KEY" };

// Minimal check: read one id from the apps table (200/206 means OK)
const r = await fetch(`${url}/rest/v1/apps?select=id&limit=1`, {
headers: {
apikey: anon,
Authorization: `Bearer ${anon}`,
},
cache: "no-store",
});

if (r.status === 200 || r.status === 206) return { ok: true };
return { ok: false, message: `HTTP ${r.status} (apps table or policy?)` };
} catch (err: any) {
return { ok: false, message: err?.message || "Supabase check failed" };
}
}

async function checkFirecrawl(): Promise<Check> {
const key = process.env.FIRECRAWL_KEY;
return key ? { ok: true } : { ok: false, message: "Missing FIRECRAWL_KEY" };
}

async function checkE2B(): Promise<Check> {
const key = process.env.E2B_API_KEY;
return key ? { ok: true } : { ok: false, message: "Missing E2B_API_KEY" };
}

export async function GET() {
const [openai, supabase, firecrawl, e2b] = await Promise.all([
checkOpenAI(),
checkSupabase(),
checkFirecrawl(),
checkE2B(),
]);

// Consider OpenAI + Supabase as must-pass
const ok = openai.ok && supabase.ok;
const status = ok ? 200 : 503;

return NextResponse.json(
{ ok, openai, supabase, firecrawl, e2b },
{ status }
);
}
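A quick way to exercise the endpoint during development (a sketch: the response shape mirrors the handler above, and the port assumes the default `npm run dev` setup):

```ts
// Development sketch: query the health endpoint and log any failing checks.
async function checkHealth(): Promise<boolean> {
  const res = await fetch('http://localhost:3000/api/health');
  const health = await res.json(); // { ok, openai, supabase, firecrawl, e2b }
  for (const [name, check] of Object.entries(health)) {
    if (check && typeof check === 'object' && !(check as { ok: boolean }).ok) {
      console.warn(`${name} failed: ${(check as { message?: string }).message ?? 'unknown'}`);
    }
  }
  return health.ok as boolean;
}
```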
8 changes: 8 additions & 0 deletions app/app/template.tsx
@@ -0,0 +1,8 @@
import { Suspense } from "react";

export const dynamic = "force-dynamic";
export const revalidate = 0;

export default function Template({ children }: { children: React.ReactNode }) {
return <Suspense fallback={null}>{children}</Suspense>;
}
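In the App Router, a `template.tsx` wraps each route segment's children and is remounted on navigation, so this single file gives every page a client-side `Suspense` boundary without editing individual pages.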
21 changes: 11 additions & 10 deletions app/layout.tsx
@@ -1,23 +1,24 @@
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import { Suspense } from "react";
import "./globals.css";
import DiagnosticsPanel from '@/components/DiagnosticsPanel';

export const dynamic = 'force-dynamic';
export const revalidate = 0;

const inter = Inter({ subsets: ["latin"] });

export const metadata: Metadata = {
  title: "Open Lovable",
  description: "Re-imagine any website in seconds with AI-powered website builder.",
};

export default function RootLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  return (
    <html lang="en">
      <body className={inter.className}>
        <Suspense fallback={null}>
          {children}
        </Suspense>
        <DiagnosticsPanel />
      </body>
    </html>
  );
3 changes: 2 additions & 1 deletion app/page.tsx
@@ -1,5 +1,6 @@
'use client';

import { Suspense } from 'react';
import { useState, useEffect, useRef } from 'react';
import { useSearchParams, useRouter } from 'next/navigation';
import { appConfig } from '@/config/app.config';
@@ -3426,4 +3427,4 @@ Focus on the key sections and content, making it clean and modern.`;

</div>
);
}
52 changes: 52 additions & 0 deletions components/DiagnosticsPanel.tsx
@@ -0,0 +1,52 @@
'use client';

import { useState, useEffect } from 'react';

interface ErrorEntry {
message: string;
stack?: string;
timestamp: string;
}

export default function DiagnosticsPanel() {
  const [errors, setErrors] = useState<ErrorEntry[]>([]);

useEffect(() => {
function handleError(event: ErrorEvent) {
setErrors((prev) => [
...prev,
{ message: event.message, stack: event.error?.stack, timestamp: new Date().toISOString() },
]);
}

function handleRejection(event: PromiseRejectionEvent) {
setErrors((prev) => [
...prev,
{ message: event.reason?.message || String(event.reason), stack: event.reason?.stack, timestamp: new Date().toISOString() },
]);
}

window.addEventListener('error', handleError);
window.addEventListener('unhandledrejection', handleRejection);
return () => {
window.removeEventListener('error', handleError);
window.removeEventListener('unhandledrejection', handleRejection);
};
}, []);

  // Only render in development; the check comes after the hooks so the hook
  // order stays stable across renders (Rules of Hooks).
  if (process.env.NODE_ENV !== 'development') return null;

  return (
<div style={{ position: 'fixed', bottom: 0, right: 0, width: 300, maxHeight: 200, overflowY: 'auto', background: '#fff', border: '1px solid #ccc', padding: 10, zIndex: 9999 }}>
<h4>Diagnostics Panel</h4>
{errors.map((err, idx) => (
<div key={idx} style={{ marginBottom: 10 }}>
<strong>{err.timestamp}</strong>
<div>{err.message}</div>
{err.stack && <pre style={{ whiteSpace: 'pre-wrap' }}>{err.stack}</pre>}
</div>
))}
</div>
);
}
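One way to verify the panel during development is to mount a throwaway component that throws after a delay; the panel's `window` 'error' listener should pick it up. The component name and timing below are hypothetical:

```tsx
'use client';

import { useEffect } from 'react';

// Hypothetical smoke test: mount this anywhere in development and an uncaught
// error should appear in the DiagnosticsPanel about a second later.
export default function DiagnosticsSmokeTest() {
  useEffect(() => {
    const id = setTimeout(() => {
      throw new Error('DiagnosticsPanel smoke test');
    }, 1000);
    return () => clearTimeout(id);
  }, []);
  return null;
}
```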