diff --git a/skills/.experimental/supabase-security-audit/SKILL.md b/skills/.experimental/supabase-security-audit/SKILL.md new file mode 100644 index 00000000..0c406e79 --- /dev/null +++ b/skills/.experimental/supabase-security-audit/SKILL.md @@ -0,0 +1,106 @@ +--- +name: supabase-security-audit +description: Audit and harden Supabase or PostgreSQL projects by reviewing database schema, row level security coverage, policy correctness, service-role exposure, auth boundaries, and common application security mistakes. Use when Codex needs to add or fix RLS on existing tables, inspect Supabase migrations, review server and client auth code, or investigate unknown security vulnerabilities in a Supabase-backed app. +--- + +# Supabase Security Audit + +## Overview + +Audit Supabase and PostgreSQL projects for authorization gaps and common security mistakes. Prefer concrete findings and code or SQL fixes over generic advice. + +## Quick Start + +1. Read the migration files under `supabase/`, `db/`, or other SQL directories. +2. Read the Supabase client wrappers, auth helpers, API routes, server actions, storage handlers, and env loaders. +3. Run `python3 scripts/audit_supabase_security.py /path/to/project` from this skill directory when a static scan will save time. +4. Read `references/audit-checklist.md` for the full review sequence. +5. Read `references/rls-policy-patterns.md` when writing or tightening policies. + +## Workflow + +### 1. Inventory the trust boundaries + +List: + +- public tables and views +- user-owned tables +- backend-only tables +- service-role code paths +- client code that talks to Supabase directly +- privileged functions, triggers, workers, and webhooks + +Treat every `public` table as user-reachable until proven otherwise. + +### 2. Check RLS coverage first + +For each application table: + +- Ensure `alter table ... enable row level security` exists. +- Prefer `force row level security` when owners should still be bound by policies.
+- Treat `RLS enabled but no policies` as deny-all. Accept that state only when the table is intentionally backend-only. +- Flag `using (true)` and `with check (true)` for review instead of assuming they are safe. + +If a table has no RLS, add it before doing anything else. + +### 3. Write least-privilege policies + +Choose the smallest valid audience: + +- owner only +- admin only +- public read with scoped write +- join-based access through a parent ownership table + +Avoid blanket `for all` policies unless the same rule is correct for every command. Prefer separate `select`, `insert`, `update`, and `delete` policies when rules differ. + +### 4. Review privileged SQL + +Inspect: + +- `security definer` functions +- triggers that write rows on behalf of users +- grants to `anon` or `authenticated` +- views or functions that can bypass intended RLS behavior +- migrations that backfill data and forget to restore protections + +Require a concrete reason for every privileged object. When a `security definer` function is necessary, keep it schema-qualified and set an explicit `search_path`. + +### 5. Review application-side security + +Check for: + +- service-role secrets in client bundles or `NEXT_PUBLIC_*` variables +- API routes and server actions that trust user input without ownership checks +- storage upload or signing endpoints that let one user act on another user's files +- admin-only flows guarded only in the UI +- worker or webhook code that writes to tables whose policies assume end-user auth + +Keep server-side authorization checks even when RLS already exists. + +### 6. 
Apply fixes and verify + +When hardening the project: + +- create a new migration instead of rewriting history unless the repo clearly treats the schema as disposable +- enable RLS on uncovered tables +- add or tighten policies +- remove or narrow risky grants and helper functions +- verify that allowed actors still succeed and disallowed actors fail + +## Output + +Report findings in severity order with: + +- object or file +- impact +- exact fix +- follow-up verification + +When asked to implement hardening, summarize which actor can now access each protected table or endpoint. + +## Resources + +- `scripts/audit_supabase_security.py`: Static scanner for SQL RLS coverage, risky policies, privileged functions, and basic code-side exposure checks. +- `references/audit-checklist.md`: Full review checklist and live-database SQL queries. +- `references/rls-policy-patterns.md`: Reusable policy patterns for owner, admin, public, and backend-only tables. diff --git a/skills/.experimental/supabase-security-audit/agents/openai.yaml b/skills/.experimental/supabase-security-audit/agents/openai.yaml new file mode 100644 index 00000000..67c56602 --- /dev/null +++ b/skills/.experimental/supabase-security-audit/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Supabase Security Audit" + short_description: "Audit Supabase security, RLS, and auth boundaries" + default_prompt: "Use $supabase-security-audit to review database RLS, auth boundaries, and security risks in this project." diff --git a/skills/.experimental/supabase-security-audit/references/audit-checklist.md b/skills/.experimental/supabase-security-audit/references/audit-checklist.md new file mode 100644 index 00000000..d18688ae --- /dev/null +++ b/skills/.experimental/supabase-security-audit/references/audit-checklist.md @@ -0,0 +1,94 @@ +# Audit Checklist + +Use this checklist when the task is a real security review rather than a quick RLS fix. + +## 1. 
Schema inventory + +- List every application table, view, function, trigger, bucket, worker, and webhook. +- Mark each table as `user-facing`, `shared/public-read`, `admin-only`, or `backend-only`. +- Treat anything in the `public` schema as exposed to authenticated users unless proven otherwise. + +## 2. RLS and policy review + +- Confirm `enable row level security` on every application table. +- Decide whether `force row level security` is needed. +- Review each policy by command, not only by table. +- Check `using` and `with check` separately. +- Flag `for all`, `using (true)`, and `with check (true)` for manual review. +- Confirm child-table access is constrained through the owning parent row. + +## 3. Privileged SQL review + +- Review every `security definer` function. +- Require an explicit `search_path` on privileged functions. +- Review custom grants to `anon`, `authenticated`, and `public`. +- Check triggers that insert rows into protected tables. +- Verify helper functions do not create policy recursion against RLS-protected tables. + +## 4. App and API review + +- Confirm service-role keys stay on the server only. +- Check `NEXT_PUBLIC_*` env vars for accidental secret exposure. +- Verify API routes and server actions enforce ownership before writes. +- Check uploads, download signing, and storage paths for cross-tenant access. +- Confirm admin flows are enforced on the server, not only hidden in the UI. +- Review webhook handlers and workers that bypass user-context RLS. + +## 5. Verification + +- Test one allowed actor and one disallowed actor for each sensitive table or endpoint. +- Confirm public pages only read rows intended for public access. +- Confirm owners cannot mutate another owner's rows. +- Confirm backend-only tables deny normal user tokens. 
+ +## Live SQL Queries + +### Tables and RLS state + +```sql +select + n.nspname as schema_name, + c.relname as table_name, + c.relrowsecurity as rls_enabled, + c.relforcerowsecurity as rls_forced +from pg_class c +join pg_namespace n on n.oid = c.relnamespace +where c.relkind = 'r' + and n.nspname not in ('pg_catalog', 'information_schema') +order by 1, 2; +``` + +### Policies + +```sql +select + schemaname, + tablename, + policyname, + cmd, + roles, + qual, + with_check +from pg_policies +order by 1, 2, 3; +``` + +### Security definer functions + +```sql +select + n.nspname as schema_name, + p.proname as function_name, + p.prosecdef as security_definer +from pg_proc p +join pg_namespace n on n.oid = p.pronamespace +where n.nspname not in ('pg_catalog', 'information_schema') + and p.prosecdef +order by 1, 2; +``` + +## Triage + +- `High`: public write access, cross-tenant reads, client-exposed secrets, or admin bypass. +- `Medium`: missing RLS, broad policies without proof they are intentional, unsafe privileged helpers. +- `Low`: missing hardening, incomplete verification, or hygiene issues that do not create direct access today. diff --git a/skills/.experimental/supabase-security-audit/references/rls-policy-patterns.md b/skills/.experimental/supabase-security-audit/references/rls-policy-patterns.md new file mode 100644 index 00000000..d48c8722 --- /dev/null +++ b/skills/.experimental/supabase-security-audit/references/rls-policy-patterns.md @@ -0,0 +1,100 @@ +# RLS Policy Patterns + +Use these patterns as starting points. Adapt column names and helper functions to the project. 
+ +## Owner can read and update their own row + +```sql +alter table public.profiles enable row level security; + +create policy "profiles self read" +on public.profiles +for select +using (id = auth.uid()); + +create policy "profiles self update" +on public.profiles +for update +using (id = auth.uid()) +with check (id = auth.uid()); +``` + +## Owner inserts child rows + +```sql +create policy "orders buyer insert" +on public.orders +for insert +with check (buyer_id = auth.uid()); +``` + +Match the inserted owner column in `with check`. Do not rely on a UI-hidden field. + +## Child table inherits access from parent ownership + +```sql +create policy "assets owner read" +on public.product_assets +for select +using ( + exists ( + select 1 + from public.products p + where p.id = product_id + and p.seller_id = auth.uid() + ) +); +``` + +Use `exists` against the owning table when the child row has no direct `user_id`. + +## Public read with owner or admin override + +```sql +create policy "products public approved read" +on public.products +for select +using ( + status = 'approved' + or seller_id = auth.uid() + or public.is_admin() +); +``` + +Reserve public read for explicitly public rows. Keep write policies separate. + +## Admin-only table + +```sql +alter table public.admin_actions enable row level security; + +create policy "admin actions admin read" +on public.admin_actions +for select +using (public.is_admin()); +``` + +Prefer explicit admin-only policies over application-only checks. + +## Backend-only table + +```sql +alter table public.internal_jobs enable row level security; +``` + +Leave the table with no user-facing policies when only service-role code should access it. Document why the deny-all state is intentional. + +## Security definer helper notes + +- Use `security definer` only when ordinary RLS-aware SQL cannot express the requirement. +- Set `search_path` explicitly on privileged functions. +- Keep helper functions schema-qualified. 
+- Avoid helper functions that query the same RLS-protected table used in their calling policy unless the recursion behavior is proven safe.
+
+## Anti-patterns
+
+- `with check (true)` on user-driven inserts or updates.
+- `using (true)` on update or delete policies.
+- one `for all` policy when read and write rules differ.
+- trusting a service-role API route without server-side ownership checks.
+- exposing secrets through `NEXT_PUBLIC_*`.
diff --git a/skills/.experimental/supabase-security-audit/scripts/audit_supabase_security.py b/skills/.experimental/supabase-security-audit/scripts/audit_supabase_security.py
new file mode 100755
index 00000000..a5e49ad8
--- /dev/null
+++ b/skills/.experimental/supabase-security-audit/scripts/audit_supabase_security.py
@@ -0,0 +1,445 @@
+#!/usr/bin/env python3
+"""Static Supabase/Postgres security audit helper."""
+
+from __future__ import annotations
+
+import argparse
+import re
+from dataclasses import dataclass
+from pathlib import Path
+
+SKIP_DIRS = {
+    ".git",
+    ".next",
+    ".turbo",
+    "build",
+    "coverage",
+    "dist",
+    "node_modules",
+    "vendor",
+}
+SQL_EXTENSIONS = {".sql"}
+CODE_EXTENSIONS = {".js", ".jsx", ".ts", ".tsx", ".mjs", ".cjs"}
+SYSTEM_SCHEMAS = {"pg_catalog", "information_schema", "pg_toast"}
+RISKY_PUBLIC_ENV_RE = re.compile(
+    r"\bNEXT_PUBLIC_[A-Z0-9_]*(?:SECRET|TOKEN|PRIVATE|SERVICE_ROLE|WEBHOOK)[A-Z0-9_]*\b"
+)
+# Word-boundary match so role names like "public_api" or "anonymizer" do not
+# trigger false grant findings.
+PRIVILEGED_ROLE_RE = re.compile(r"\b(?:anon|authenticated|public)\b")
+
+
+@dataclass
+class Finding:
+    severity: str
+    kind: str
+    location: str
+    message: str
+
+
+def should_skip(path: Path) -> bool:
+    """Return True when *path* sits inside a build or vendor directory."""
+    return any(part in SKIP_DIRS for part in path.parts)
+
+
+def strip_comments(text: str) -> str:
+    """Strip SQL block (/* */) and line (--) comments from *text*."""
+    text = re.sub(r"/\*.*?\*/", "", text, flags=re.S)
+    return re.sub(r"--[^\n]*", "", text)
+
+
+def split_sql_statements(text: str) -> list[str]:
+    """Split SQL into statements, honoring quoted strings and dollar-quoted bodies."""
+    statements: list[str] = []
+    current: list[str] = []
+    i = 0
+    in_single = False
+    in_double = False
+    dollar_tag: str | None = None
+
+    while i < len(text):
+        ch = text[i]
+
+        if dollar_tag:
+            if text.startswith(dollar_tag, i):
+                current.append(dollar_tag)
+                i += len(dollar_tag)
+                dollar_tag = None
+                continue
+            current.append(ch)
+            i += 1
+            continue
+
+        if in_single:
+            current.append(ch)
+            if ch == "'" and not text.startswith("''", i):
+                in_single = False
+            elif text.startswith("''", i):
+                current.append("'")
+                i += 2
+                continue
+            i += 1
+            continue
+
+        if in_double:
+            current.append(ch)
+            if ch == '"':
+                in_double = False
+            i += 1
+            continue
+
+        if ch == "'":
+            in_single = True
+            current.append(ch)
+            i += 1
+            continue
+
+        if ch == '"':
+            in_double = True
+            current.append(ch)
+            i += 1
+            continue
+
+        if ch == "$":
+            match = re.match(r"\$[A-Za-z0-9_]*\$", text[i:])
+            if match:
+                dollar_tag = match.group(0)
+                current.append(dollar_tag)
+                i += len(dollar_tag)
+                continue
+
+        if ch == ";":
+            statement = "".join(current).strip()
+            if statement:
+                statements.append(statement)
+            current = []
+            i += 1
+            continue
+
+        current.append(ch)
+        i += 1
+
+    tail = "".join(current).strip()
+    if tail:
+        statements.append(tail)
+    return statements
+
+
+def normalize_identifier(raw: str) -> str:
+    """Return a schema-qualified identifier, defaulting the schema to public."""
+    parts = [part.strip().strip('"') for part in raw.strip().split(".") if part.strip()]
+    if not parts:
+        return raw.strip()
+    if len(parts) == 1:
+        return f"public.{parts[0]}"
+    return ".".join(parts)
+
+
+def collect_files(root: Path, suffixes: set[str]) -> list[Path]:
+    """Collect files under *root* matching *suffixes*, skipping SKIP_DIRS."""
+    files: list[Path] = []
+    for path in root.rglob("*"):
+        if not path.is_file():
+            continue
+        if should_skip(path):
+            continue
+        if path.suffix.lower() in suffixes:
+            files.append(path)
+    return sorted(files)
+
+
+def audit_sql(root: Path) -> tuple[list[Finding], dict[str, dict[str, object]]]:
+    """Scan SQL files for RLS coverage, risky policies, grants, and SECURITY DEFINER use."""
+    findings: list[Finding] = []
+    tables: dict[str, dict[str, object]] = {}
+
+    create_table_re = re.compile(
+        r"^create\s+table\s+(?:if\s+not\s+exists\s+)?([A-Za-z0-9_\".]+)",
+        re.I,
+    )
+    enable_rls_re = re.compile(
+        r"^alter\s+table\s+(?:if\s+exists\s+)?([A-Za-z0-9_\".]+)\s+enable\s+row\s+level\s+security$",
+        re.I,
+    )
+    force_rls_re = re.compile(
+        r"^alter\s+table\s+(?:if\s+exists\s+)?([A-Za-z0-9_\".]+)\s+force\s+row\s+level\s+security$",
+        re.I,
+    )
+    create_policy_re = re.compile(
+        r"^create\s+policy\s+.+?\s+on\s+([A-Za-z0-9_\".]+)\b",
+        re.I | re.S,
+    )
+    create_function_re = re.compile(
+        r"^create\s+(?:or\s+replace\s+)?function\s+([A-Za-z0-9_\".]+)\b",
+        re.I,
+    )
+    grant_re = re.compile(
+        r"^grant\s+(.+?)\s+on\s+(?:table\s+)?([A-Za-z0-9_\".]+)\s+to\s+(.+)$",
+        re.I | re.S,
+    )
+
+    for sql_file in collect_files(root, SQL_EXTENSIONS):
+        relative = sql_file.relative_to(root)
+        # errors="replace" keeps the scan alive on non-UTF-8 migration files.
+        text = strip_comments(sql_file.read_text(encoding="utf-8", errors="replace"))
+        for statement in split_sql_statements(text):
+            normalized = " ".join(statement.split())
+            lower = normalized.lower()
+
+            match = create_table_re.match(normalized)
+            if match:
+                name = normalize_identifier(match.group(1))
+                schema = name.split(".", 1)[0]
+                if schema not in SYSTEM_SCHEMAS:
+                    tables.setdefault(
+                        name,
+                        {
+                            "file": str(relative),
+                            "rls": False,
+                            "force": False,
+                            "policies": [],
+                        },
+                    )
+                continue
+
+            match = enable_rls_re.match(normalized)
+            if match:
+                name = normalize_identifier(match.group(1))
+                tables.setdefault(
+                    name,
+                    {"file": str(relative), "rls": False, "force": False, "policies": []},
+                )["rls"] = True
+                continue
+
+            match = force_rls_re.match(normalized)
+            if match:
+                name = normalize_identifier(match.group(1))
+                entry = tables.setdefault(
+                    name,
+                    {"file": str(relative), "rls": False, "force": False, "policies": []},
+                )
+                entry["force"] = True
+                continue
+
+            match = create_policy_re.match(normalized)
+            if match:
+                name = normalize_identifier(match.group(1))
+                policy = {
+                    "file": str(relative),
+                    "command": "all",
+                    "using_true": bool(re.search(r"\busing\s*\(\s*true\s*\)", lower)),
+                    "with_check_true": bool(re.search(r"\bwith\s+check\s*\(\s*true\s*\)", lower)),
+                }
+                cmd_match = re.search(r"\bfor\s+(select|insert|update|delete|all)\b", lower)
+                if cmd_match:
+                    policy["command"] = cmd_match.group(1)
+                tables.setdefault(
+                    name,
+                    {"file": str(relative), "rls": False, "force": False, "policies": []},
+                )["policies"].append(policy)
+                continue
+
+            match = create_function_re.match(normalized)
+            if match and "security definer" in lower:
+                func_name = normalize_identifier(match.group(1))
+                if "set search_path" in lower:
+                    findings.append(
+                        Finding(
+                            "info",
+                            "security-definer",
+                            str(relative),
+                            f"{func_name} uses SECURITY DEFINER and should be reviewed like privileged code.",
+                        )
+                    )
+                else:
+                    findings.append(
+                        Finding(
+                            "medium",
+                            "security-definer",
+                            str(relative),
+                            f"{func_name} uses SECURITY DEFINER without an explicit search_path.",
+                        )
+                    )
+                continue
+
+            match = grant_re.match(normalized)
+            if match:
+                privileges = match.group(1).lower()
+                object_name = normalize_identifier(match.group(2))
+                roles = match.group(3).lower()
+                if PRIVILEGED_ROLE_RE.search(roles):
+                    severity = (
+                        "high"
+                        if any(word in privileges for word in ("insert", "update", "delete", "all"))
+                        else "medium"
+                    )
+                    findings.append(
+                        Finding(
+                            severity,
+                            "grant-review",
+                            str(relative),
+                            f"Grant on {object_name} to {roles.strip()} should be reviewed: {privileges.strip()}",
+                        )
+                    )
+
+    for table_name, entry in sorted(tables.items()):
+        schema = table_name.split(".", 1)[0]
+        if schema in SYSTEM_SCHEMAS:
+            continue
+
+        policies = entry["policies"]
+        if not entry["rls"]:
+            findings.append(
+                Finding(
+                    "high",
+                    "missing-rls",
+                    entry["file"],
+                    f"{table_name} is created without row level security.",
+                )
+            )
+            continue
+
+        if not policies:
+            findings.append(
+                Finding(
+                    "medium",
+                    "no-policies",
+                    entry["file"],
+                    f"{table_name} has RLS enabled but no policies. Confirm deny-all is intentional.",
+                )
+            )
+
+        if not entry["force"]:
+            findings.append(
+                Finding(
+                    "info",
+                    "rls-not-forced",
+                    entry["file"],
+                    f"{table_name} does not use FORCE ROW LEVEL SECURITY. Review whether owner bypass should remain possible.",
+                )
+            )
+
+        for policy in policies:
+            command = str(policy["command"])
+            if policy["with_check_true"]:
+                findings.append(
+                    Finding(
+                        "medium",
+                        "broad-policy",
+                        policy["file"],
+                        f"{table_name} has a {command} policy with WITH CHECK (true). Confirm user-driven writes are really unrestricted.",
+                    )
+                )
+            if policy["using_true"]:
+                severity = "medium" if command in {"all", "update", "delete"} else "info"
+                findings.append(
+                    Finding(
+                        severity,
+                        "broad-policy",
+                        policy["file"],
+                        f"{table_name} has a {command} policy with USING (true). Confirm this wide read/write scope is intentional.",
+                    )
+                )
+
+    return findings, tables
+
+
+def audit_code(root: Path) -> list[Finding]:
+    """Scan JS/TS files for exposed secrets and risky client-side patterns."""
+    findings: list[Finding] = []
+
+    for code_file in collect_files(root, CODE_EXTENSIONS):
+        relative = code_file.relative_to(root)
+        # errors="replace" keeps the scan alive on non-UTF-8 source files.
+        text = code_file.read_text(encoding="utf-8", errors="replace")
+
+        if RISKY_PUBLIC_ENV_RE.search(text):
+            findings.append(
+                Finding(
+                    "high",
+                    "public-secret",
+                    str(relative),
+                    "A NEXT_PUBLIC_* variable looks like a secret or privileged token.",
+                )
+            )
+
+        lower = text.lower()
+        is_client = '"use client"' in lower or "'use client'" in lower or "createbrowserclient" in lower
+        if "supabase_service_role_key" in lower and is_client:
+            findings.append(
+                Finding(
+                    "high",
+                    "client-service-role",
+                    str(relative),
+                    "SUPABASE_SERVICE_ROLE_KEY appears in code that looks client-side.",
+                )
+            )
+
+        if "dangerouslysetinnerhtml" in lower:
+            findings.append(
+                Finding(
+                    "medium",
+                    "dangerous-html",
+                    str(relative),
+                    "dangerouslySetInnerHTML is present. Verify sanitization and trusted content boundaries.",
+                )
+            )
+
+        if re.search(r"\beval\s*\(", text) or re.search(r"\bnew\s+Function\s*\(", text):
+            findings.append(
+                Finding(
+                    "medium",
+                    "dynamic-code",
+                    str(relative),
+                    "Dynamic code execution is present. Confirm untrusted input cannot reach it.",
+                )
+            )
+
+    return findings
+
+
+def print_findings(findings: list[Finding], sql_files: int, code_files: int, tables_found: int) -> None:
+    """Print findings grouped by severity, highest severity first."""
+    order = {"high": 0, "medium": 1, "info": 2}
+    grouped = {"high": [], "medium": [], "info": []}
+    for finding in sorted(findings, key=lambda item: (order[item.severity], item.location, item.kind, item.message)):
+        grouped[finding.severity].append(finding)
+
+    print(f"Scanned {sql_files} SQL files, {code_files} code files, and {tables_found} tables.")
+    print()
+
+    for severity in ("high", "medium", "info"):
+        title = severity.upper()
+        print(title)
+        if not grouped[severity]:
+            print("- none")
+        else:
+            for finding in grouped[severity]:
+                print(f"- [{finding.kind}] {finding.location}: {finding.message}")
+        print()
+
+
+def main() -> int:
+    """CLI entry point: scan the given project root and print findings."""
+    parser = argparse.ArgumentParser(description="Audit a Supabase/Postgres project for common security gaps.")
+    parser.add_argument("root", nargs="?", default=".", help="Project root to scan")
+    args = parser.parse_args()
+
+    root = Path(args.root).resolve()
+    if not root.exists():
+        raise SystemExit(f"Path not found: {root}")
+    if not root.is_dir():
+        raise SystemExit(f"Path is not a directory: {root}")
+
+    sql_files = collect_files(root, SQL_EXTENSIONS)
+    code_files = collect_files(root, CODE_EXTENSIONS)
+    sql_findings, tables = audit_sql(root)
+    code_findings = audit_code(root)
+    findings = sql_findings + code_findings
+
+    print_findings(findings, len(sql_files), len(code_files), len(tables))
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())