diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..e271920ff --- /dev/null +++ b/.dockerignore @@ -0,0 +1,14 @@ +.git +.env +.env.* +.gstack +.claude +.codex +node_modules +browse/dist +design/dist +coverage +dist +tmp +*.log +.DS_Store \ No newline at end of file diff --git a/.env.example b/.env.example index 04c8f010b..19f051354 100644 --- a/.env.example +++ b/.env.example @@ -1,5 +1,41 @@ -# Copy to .env and fill in values -# bun auto-loads .env — no dotenv needed +# Copy to .env and fill in values. +# This file is the primary local auth source for make/docker workflows. +# Nothing here is baked into images. +# Do not commit or plain-git sync a filled-in .env file. +# If you need cross-machine distribution, use an encrypted secret manager, +# encrypted git workflow, Syncthing, or another local-only transport. +# +# If .env is absent, cloud runners can still inject these variables via their +# own harness or secret manager and the Makefile will pass them through at runtime. +# +# For local interactive use, account login state from ~/.claude and ~/.codex is +# mounted into the container by default, so users with existing Claude Code or +# Codex logins usually do not need API keys for account-backed flows. +# +# General rule for other services: +# - If the service CLI stores login state on disk, mount that config directory +# into the container instead of copying tokens into the image. +# - If the service only supports env-based auth, keep the secret here in .env +# or inject it from your cloud harness at runtime. -# Required for LLM-as-judge evals (bun run test:eval) -ANTHROPIC_API_KEY=sk-ant-your-key-here +# Anthropic account-backed OAuth/session token fallback for non-mounted or +# headless environments. Keep this runtime-only in .env or your cloud secret +# harness; never pass it as a Docker build arg. 
+ANTHROPIC_AUTH_TOKEN= + +# Anthropic API key fallback for evals or API-only environments +ANTHROPIC_API_KEY= + +# OpenAI / Codex fallback for API-key environments +OPENAI_API_KEY= + +# Gemini fallback for API-key environments +GEMINI_API_KEY= + +# Optional custom endpoints for proxy or self-hosted routing +ANTHROPIC_BASE_URL= +OPENAI_BASE_URL= +GEMINI_BASE_URL= + +# Add other service-specific runtime vars below as needed. Keep them out of the +# Dockerfile and out of build args. diff --git a/.github/docker/Dockerfile.ci b/.github/docker/Dockerfile.ci index 038b25762..a60efabba 100644 --- a/.github/docker/Dockerfile.ci +++ b/.github/docker/Dockerfile.ci @@ -2,6 +2,11 @@ # Rebuild weekly via ci-image.yml, on Dockerfile changes, or on lockfile changes FROM ubuntu:24.04 +ARG BUN_VERSION=1.3.10 +ARG BUN_INSTALL_SHA=bab8acfb046aac8c72407bdcce903957665d655d7acaa3e11c7c4616beae68dd + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + ENV DEBIAN_FRONTEND=noninteractive # System deps @@ -24,7 +29,18 @@ RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \ # Bun (install to /usr/local so non-root users can access it) ENV BUN_INSTALL="/usr/local" -RUN curl -fsSL https://bun.sh/install | BUN_VERSION=1.3.10 bash +RUN tmpfile="$(mktemp)" \ + && curl -fsSL https://bun.sh/install -o "$tmpfile" \ + && actual_sha="$(sha256sum "$tmpfile" | awk '{print $1}')" \ + && if [ "$actual_sha" != "$BUN_INSTALL_SHA" ]; then \ + echo "ERROR: bun install script checksum mismatch" >&2; \ + echo " expected: $BUN_INSTALL_SHA" >&2; \ + echo " got: $actual_sha" >&2; \ + rm -f "$tmpfile"; \ + exit 1; \ + fi \ + && BUN_VERSION="${BUN_VERSION}" bash "$tmpfile" \ + && rm -f "$tmpfile" # Claude CLI RUN npm i -g @anthropic-ai/claude-code diff --git a/.github/workflows/evals-periodic.yml b/.github/workflows/evals-periodic.yml index 20035c453..1a2fca80c 100644 --- a/.github/workflows/evals-periodic.yml +++ b/.github/workflows/evals-periodic.yml @@ -92,6 +92,18 @@ jobs: with: fetch-depth: 0 + - name: 
Mask provider secrets + shell: bash + run: | + for secret in \ + "${{ secrets.ANTHROPIC_API_KEY }}" \ + "${{ secrets.OPENAI_API_KEY }}" \ + "${{ secrets.GEMINI_API_KEY }}"; do + if [ -n "$secret" ]; then + echo "::add-mask::$secret" + fi + done + - name: Fix bun temp run: | mkdir -p /home/runner/.cache/bun @@ -126,4 +138,4 @@ jobs: with: name: eval-periodic-${{ matrix.suite.name }} path: ~/.gstack-dev/evals/*.json - retention-days: 90 + retention-days: 14 diff --git a/.github/workflows/evals.yml b/.github/workflows/evals.yml index a7b1fd997..1ba8372c7 100644 --- a/.github/workflows/evals.yml +++ b/.github/workflows/evals.yml @@ -99,6 +99,18 @@ jobs: with: fetch-depth: 0 + - name: Mask provider secrets + shell: bash + run: | + for secret in \ + "${{ secrets.ANTHROPIC_API_KEY }}" \ + "${{ secrets.OPENAI_API_KEY }}" \ + "${{ secrets.GEMINI_API_KEY }}"; do + if [ -n "$secret" ]; then + echo "::add-mask::$secret" + fi + done + # Bun creates root-owned temp dirs during Docker build. GH Actions runs as # runner user with HOME=/github/home. Redirect bun's cache to a writable dir. 
- name: Fix bun temp @@ -144,7 +156,7 @@ jobs: with: name: eval-${{ matrix.suite.name }} path: ~/.gstack-dev/evals/*.json - retention-days: 90 + retention-days: 14 report: runs-on: ubicloud-standard-2 diff --git a/.github/workflows/security-policy.yml b/.github/workflows/security-policy.yml new file mode 100644 index 000000000..391c323ac --- /dev/null +++ b/.github/workflows/security-policy.yml @@ -0,0 +1,12 @@ +name: Security Policy +on: [push, pull_request] + +jobs: + security-policy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: oven-sh/setup-bun@v2 + - run: bun install --frozen-lockfile + - name: Run security policy checks + run: bun run security:check \ No newline at end of file diff --git a/.gitignore b/.gitignore index 71f7943df..6e88bef42 100644 --- a/.gitignore +++ b/.gitignore @@ -1,20 +1,27 @@ +# Runtime secrets and local auth state .env +.env.local +.env.* +!.env.example +extension/.auth.json +.claude.json + +# Local development and generated state node_modules/ browse/dist/ design/dist/ bin/gstack-global-discover .gstack/ +.gstack-worktrees/ .claude/skills/ .agents/ .factory/ .context/ -extension/.auth.json -.gstack-worktrees/ /tmp/ *.log *.bun-build -.env -.env.local -.env.* -!.env.example supabase/.temp/ + +# Editor-local files that should stay off-repo +.vscode/launch.json +.vscode/extensions.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..812800573 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "chat.tools.terminal.autoApprove": { + "bun": true + } +} \ No newline at end of file diff --git a/ABOUT.md b/ABOUT.md new file mode 100644 index 000000000..205206ab3 --- /dev/null +++ b/ABOUT.md @@ -0,0 +1,47 @@ +# About This Fork + +This fork exists to harden agent-facing software packs against privacy leaks, manipulative product framing, and hidden operational drift. The original gStack is a large agent pack. 
This fork keeps the useful mechanics, but treats every skill, prompt, tool, and helper script as something that should be auditable for: + +- outbound telemetry and hosted egress +- dark patterns and authority-driven persuasion +- hidden automation and silent behavioral drift +- brittle generation pipelines and stale packaged artifacts +- unsafe defaults that create dependency or pressure rather than informed use + +## What This Fork Is For + +This is a working base for auditing and hardening: + +- skill packs +- prompt packs +- agent packs +- shell tool bundles +- browser automation helpers +- review and routing frameworks + +If a pack can influence user behavior or move data off the machine, it ***should be testable*** under this same health harness. + +## Operating Principles + +1. Local-first by default
Hosted telemetry, update checks, and community dashboards should be opt-in at most, and easy to hard-disable. + +2. No manipulation as product strategy
Do not rely on identity expansion, shame, urgency, status signaling, founder conversion, or dependency-building copy to drive adoption. + +3. Auditability over charisma
Generated docs, shared preambles, and packaged prompts should be traceable back to source and checked continuously. + +4. Reuse over one-off cleanup
Every problem class that appears more than once should become a harness rule, test fixture, or reusable policy check. + +## Included Here + +- a reusable stack-health harness +- fixture-backed manipulation-policy tests +- a one-switch no-egress mode for the active hosted paths in this repo +- documentation for reusing the same auditing model across other packs + +## Long-Term Direction + +The long-term goal is to use this repo as a reference implementation for auditing all skill, tool, and agent packs with the same basic question: + +> Does this pack *respect user autonomy*, *keep data local* unless explicitly allowed, and *remain understandable* under change? + +***If not, it should fail review, fail policy, and be rewritten until it does!*** diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fcb9c279a..4e3d9c194 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -115,6 +115,8 @@ bun install Bun auto-loads `.env` — no extra config. Conductor workspaces inherit `.env` from the main worktree automatically (see "Conductor workspaces" below). +Treat `.env` as local-only state. Do not commit it or sync it through plain git. If you need the same secrets on multiple machines, use a secret manager, encrypted git workflow, Syncthing, or another local-only transport outside this repo. + ### Test tiers | Tier | Command | Cost | What it tests | @@ -126,6 +128,7 @@ Bun auto-loads `.env` — no extra config. 
Conductor workspaces inherit `.env` f ```bash bun test # Tier 1 only (runs on every commit, <5s) +bun run security:check # Repo security and local-state policy gate bun run test:e2e # Tier 2: E2E only (needs EVALS=1, can't run inside Claude Code) bun run test:evals # Tier 2 + 3 combined (~$4/run) ``` diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..48d743183 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,90 @@ +# syntax=docker/dockerfile:1.7 + +ARG BASE_IMAGE=mcr.microsoft.com/playwright:v1.58.2-noble +ARG BUN_VERSION=1.3.10 +ARG BUN_INSTALL_SHA=bab8acfb046aac8c72407bdcce903957665d655d7acaa3e11c7c4616beae68dd + +FROM ${BASE_IMAGE} AS base + +ARG BUN_VERSION +ARG BUN_INSTALL_SHA + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +ENV DEBIAN_FRONTEND=noninteractive \ + HOME=/root \ + BUN_INSTALL=/usr/local \ + PLAYWRIGHT_BROWSERS_PATH=/ms-playwright \ + PATH=/usr/local/bin:/usr/local/sbin:/usr/sbin:/usr/bin:/sbin:/bin + +RUN apt-get update && apt-get install -y --no-install-recommends \ + bash \ + ca-certificates \ + curl \ + git \ + jq \ + python3 \ + unzip \ + && rm -rf /var/lib/apt/lists/* + +RUN mkdir -p /root/.claude /root/.codex /workspace + +RUN cat <<'EOF' >/usr/local/bin/gstack-container-init +#!/usr/bin/env bash +set -euo pipefail + +mkdir -p /root/.claude /root/.codex /root/.config /root/.cache /root/.local/share + +# Persist Claude's top-level config file inside the Claude volume. +if [ ! -L /root/.claude.json ]; then + rm -f /root/.claude.json + ln -s /root/.claude/.claude.json /root/.claude.json +fi + +# If Claude left only a backup, restore the newest backup as the primary config. +if [ ! 
-s /root/.claude/.claude.json ]; then + latest_backup="$(ls -1t /root/.claude/backups/.claude.json.backup.* 2>/dev/null | head -n 1 || true)" + if [ -n "$latest_backup" ]; then + cp "$latest_backup" /root/.claude/.claude.json + fi +fi + +exec "$@" +EOF +RUN chmod +x /usr/local/bin/gstack-container-init + +RUN tmpfile="$(mktemp)" \ + && curl -fsSL https://bun.sh/install -o "$tmpfile" \ + && actual_sha="$(sha256sum "$tmpfile" | awk '{print $1}')" \ + && if [ "$actual_sha" != "$BUN_INSTALL_SHA" ]; then \ + echo "ERROR: bun install script checksum mismatch" >&2; \ + echo " expected: $BUN_INSTALL_SHA" >&2; \ + echo " got: $actual_sha" >&2; \ + rm -f "$tmpfile"; \ + exit 1; \ + fi \ + && BUN_VERSION="${BUN_VERSION}" bash "$tmpfile" \ + && rm -f "$tmpfile" +RUN bun install -g @anthropic-ai/claude-code + +WORKDIR /workspace + +FROM base AS deps + +COPY package.json bun.lock* ./ +RUN bun install --frozen-lockfile 2>/dev/null || bun install + +FROM deps AS build + +COPY . . +RUN bun run build + +FROM base AS runtime + +COPY package.json bun.lock* ./ +RUN bun install --frozen-lockfile 2>/dev/null || bun install + +COPY --from=build /workspace /workspace + +ENTRYPOINT ["/usr/local/bin/gstack-container-init"] +CMD ["bash"] \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..3f5bd0dfa --- /dev/null +++ b/Makefile @@ -0,0 +1,475 @@ +# ============================================================================= +# gstack Make Workflow +# ============================================================================= +# Org-standard build/release wrapper, retargeted to gstack's Bun + Playwright +# toolchain while preserving the standard GHCR, git-flow, and release targets. 
+# +# Runs on: Linux, macOS, Windows (WSL) +# Requires: make, bash, git, python3, bun, container runtime (podman or docker) +# +# Quick start: +# make it_build — build the runtime image +# make it_run — smoke test the image +# make it_build_n_run — build + smoke test +# make help — list all targets +# ============================================================================= + +.DEFAULT_GOAL := help + +# Load environment variables from .env if it exists +ifneq (,$(wildcard ./.env)) + include .env + export +endif + +# Auto-detect container runtime (prefer podman, fall back to docker) +CONTAINER_RUNTIME ?= $(shell command -v podman 2>/dev/null || echo docker) + +# Derive org/repo from git remote (e.g. git@github.com:Sage-is/DB-sage-pb.git -> sage-is/db-sage-pb) +GIT_REPO_SLUG := $(shell git remote get-url origin 2>/dev/null | sed -E 's|\.git$$||; s|.*[:/]([^/]+/[^/]+)$$|\1|' | tr '[:upper:]' '[:lower:]') + +# Configuration variables with defaults (override with .env file) +IMAGE_NAME ?= $(GIT_REPO_SLUG) +GHCR_IMAGE_NAME ?= ghcr.io/$(GIT_REPO_SLUG) +CURRENT_VERSION := $(shell cat VERSION 2>/dev/null || echo 0.0.0.0) +IMAGE_TAG ?= $(CURRENT_VERSION) +GIT_TAG := $(shell git tag --sort=-v:refname | sed 's/^v//' | head -n 1) +GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD) +ifeq ($(GIT_BRANCH),HEAD) + GIT_BRANCH := $(shell git describe --tags --exact-match 2>/dev/null || git rev-parse --short HEAD) +endif +SAFE_GIT_BRANCH := $(subst /,-,$(GIT_BRANCH)) +SAFE_GIT_BRANCH := $(shell echo $(SAFE_GIT_BRANCH) | tr '[:upper:]' '[:lower:]') +CONTAINER_NAME ?= $(shell echo $(GIT_REPO_SLUG) | tr '/' '-') +DOCKERFILE ?= Dockerfile +BUILD_CONTEXT ?= . 
+BASE_IMAGE ?= mcr.microsoft.com/playwright:v1.58.2-noble +BUN_VERSION ?= 1.3.10 +RUN_COMMAND ?= bun run skill:check +TEST_COMMAND ?= bun test +EXPLORE_SHELL ?= bash +CLAUDE_COMMAND ?= claude +AUTH_ENV_FILE ?= .env +ENV_PASSTHROUGH_VARS ?= ANTHROPIC_API_KEY OPENAI_API_KEY GEMINI_API_KEY ANTHROPIC_AUTH_TOKEN ANTHROPIC_BASE_URL OPENAI_BASE_URL GEMINI_BASE_URL +ENABLE_ACCOUNT_MOUNTS ?= 0 +ENABLE_STATE_VOLUMES ?= 1 +CLAUDE_CONFIG_DIR ?= $(HOME)/.claude +CODEX_CONFIG_DIR ?= $(HOME)/.codex +STATE_VOLUME_PREFIX ?= $(CONTAINER_NAME) + +# Release version detection (prefers release/* or hotfix/* branch name, falls back to VERSION) +RELEASE_VERSION := $(shell git rev-parse --abbrev-ref HEAD | sed -n -e 's/^release\///p' -e 's/^hotfix\///p') +ifeq ($(RELEASE_VERSION),) + RELEASE_VERSION := $(CURRENT_VERSION) +endif + +help: + @echo "=======================================================" + @echo " $(IMAGE_NAME) — gstack" + @echo "" + @echo "Usage examples:" + @echo " 1) Build: make it_build" + @echo " 2) Run: make it_run" + @echo " 3) Explore: make it_explore" + @echo " 4) Claude: make it_claude" + @echo " 5) Health check: make health_check" + @echo " 6) Security check: make security_check" + @echo " 7) Push to GHCR: make it_build_multi_arch_push_GHCR" + @echo "" + @echo "Auth runtime:" + @echo " - Uses $(AUTH_ENV_FILE) when present" + @echo " - Falls through caller/CI env vars when $(AUTH_ENV_FILE) is absent" + @echo " - Uses named Docker volumes for ~/.claude, ~/.codex, ~/.config, ~/.cache, and ~/.local/share by default" + @echo " - Set ENABLE_ACCOUNT_MOUNTS=1 to opt into host ~/.claude and ~/.codex mounts" + @echo "" + @echo "Available make commands:" + @echo "" + @LC_ALL=C $(MAKE) -pRrq -f $(firstword $(MAKEFILE_LIST)) : 2>/dev/null \ + | awk -v RS= -F: '/(^|\n)# Files(\n|$$)/,/(^|\n)# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | sort | grep -E -v -e '^[^[:alnum:]]' -e '^$$@$$' + @echo "" + +# 
--------------------------------------------------------------------------- +# Common docker run arguments +# --------------------------------------------------------------------------- +# Auth stays runtime-only: prefer .env, then allow selected caller env passthrough. +ENV_FILE_FLAG := $(if $(wildcard $(AUTH_ENV_FILE)),--env-file $(AUTH_ENV_FILE),) +DOCKER_RUN_ENV_PASSTHROUGH := $(foreach var,$(ENV_PASSTHROUGH_VARS),$(if $(value $(var)),-e $(var),)) + +DOCKER_RUN_AUTH_MOUNTS := +ifeq ($(ENABLE_ACCOUNT_MOUNTS),1) +ifneq (,$(wildcard $(CLAUDE_CONFIG_DIR))) +DOCKER_RUN_AUTH_MOUNTS += -v $(CLAUDE_CONFIG_DIR):/root/.claude +endif +ifneq (,$(wildcard $(CODEX_CONFIG_DIR))) +DOCKER_RUN_AUTH_MOUNTS += -v $(CODEX_CONFIG_DIR):/root/.codex +endif +endif + +DOCKER_RUN_STATE_MOUNTS := +ifeq ($(ENABLE_STATE_VOLUMES),1) +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-config:/root/.config +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-cache:/root/.cache +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-local-share:/root/.local/share +ifeq ($(ENABLE_ACCOUNT_MOUNTS),0) +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-claude:/root/.claude +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-codex:/root/.codex +else +ifeq (,$(wildcard $(CLAUDE_CONFIG_DIR))) +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-claude:/root/.claude +endif +ifeq (,$(wildcard $(CODEX_CONFIG_DIR))) +DOCKER_RUN_STATE_MOUNTS += -v $(STATE_VOLUME_PREFIX)-codex:/root/.codex +endif +endif +endif + +DOCKER_RUN_BASE_ARGS := --rm \ + $(ENV_FILE_FLAG) \ + $(DOCKER_RUN_ENV_PASSTHROUGH) \ + $(DOCKER_RUN_AUTH_MOUNTS) \ + $(DOCKER_RUN_STATE_MOUNTS) + +DOCKER_RUN_ARGS := $(DOCKER_RUN_BASE_ARGS) \ + --name $(CONTAINER_NAME) + +# --------------------------------------------------------------------------- +# Container lifecycle +# --------------------------------------------------------------------------- +it_stop: + $(CONTAINER_RUNTIME) rm -f $(CONTAINER_NAME) + +it_clean: + $(CONTAINER_RUNTIME) 
system prune -f + $(CONTAINER_RUNTIME) builder prune --force + @echo "" + +it_gone: + @echo "Forcefully stopping and removing $(CONTAINER_NAME)..." + $(CONTAINER_RUNTIME) stop $(CONTAINER_NAME) || true + $(CONTAINER_RUNTIME) rm -f $(CONTAINER_NAME) || true + @echo "Container $(CONTAINER_NAME) has been removed" + +# --------------------------------------------------------------------------- +# Build +# --------------------------------------------------------------------------- +it_build: + @echo "Building Docker image with BuildKit enabled..." + @export DOCKER_BUILDKIT=1 && \ + $(CONTAINER_RUNTIME) build --load \ + -f $(DOCKERFILE) \ + --build-arg BASE_IMAGE=$(BASE_IMAGE) \ + --build-arg BUN_VERSION=$(BUN_VERSION) \ + -t $(IMAGE_NAME):$(IMAGE_TAG) \ + -t $(IMAGE_NAME):latest \ + -t $(IMAGE_NAME):$(IMAGE_TAG)-$(SAFE_GIT_BRANCH) \ + -t $(IMAGE_NAME):$(SAFE_GIT_BRANCH) \ + $(BUILD_CONTEXT) + @echo "" + +it_build_no_cache: + @echo "Building Docker image without cache..." + @export DOCKER_BUILDKIT=1 && \ + $(CONTAINER_RUNTIME) build --no-cache --load \ + -f $(DOCKERFILE) \ + --build-arg BASE_IMAGE=$(BASE_IMAGE) \ + --build-arg BUN_VERSION=$(BUN_VERSION) \ + -t $(IMAGE_NAME):$(IMAGE_TAG) \ + -t $(IMAGE_NAME):latest \ + -t $(IMAGE_NAME):$(IMAGE_TAG)-$(SAFE_GIT_BRANCH) \ + -t $(IMAGE_NAME):$(SAFE_GIT_BRANCH) \ + $(BUILD_CONTEXT) + @echo "" + +# --------------------------------------------------------------------------- +# Run +# --------------------------------------------------------------------------- +it_run: + $(CONTAINER_RUNTIME) run $(DOCKER_RUN_ARGS) $(IMAGE_NAME):$(IMAGE_TAG) bash -lc '$(RUN_COMMAND)' + +it_run_ghcr: + $(CONTAINER_RUNTIME) run $(DOCKER_RUN_ARGS) $(GHCR_IMAGE_NAME):$(IMAGE_TAG) bash -lc '$(RUN_COMMAND)' + +# Combined build and run +it_build_n_run: it_build + @make it_run + +it_build_n_run_no_cache: it_build_no_cache + @make it_run + +# Run image with the current checkout bind-mounted for local development +it_explore: + @$(CONTAINER_RUNTIME) run 
$(DOCKER_RUN_BASE_ARGS) -it \ + -v $$(pwd):/workspace \ + -w /workspace \ + --name $(CONTAINER_NAME)-dev \ + $(IMAGE_NAME):$(IMAGE_TAG) \ + $(EXPLORE_SHELL) + +it_claude: + @$(CONTAINER_RUNTIME) run $(DOCKER_RUN_BASE_ARGS) -it \ + -v $$(pwd):/workspace \ + -w /workspace \ + --name $(CONTAINER_NAME)-claude \ + $(IMAGE_NAME):$(IMAGE_TAG) \ + bash -lc '$(CLAUDE_COMMAND)' + +it_run_dev: it_explore + +# Build and run tests in a fresh container +it_build_n_test_fresh: it_build + @echo "Running tests in a fresh container..." + $(CONTAINER_RUNTIME) run $(DOCKER_RUN_BASE_ARGS) $(IMAGE_NAME):$(IMAGE_TAG) bash -lc '$(TEST_COMMAND)' + @echo "Fresh container test run complete." + +# --------------------------------------------------------------------------- +# Local test harness +# --------------------------------------------------------------------------- +test: + bun test + +test_fresh: + $(CONTAINER_RUNTIME) run $(DOCKER_RUN_BASE_ARGS) $(IMAGE_NAME):$(IMAGE_TAG) bash -lc '$(TEST_COMMAND)' + +security_check: + bun run security:check + +# --------------------------------------------------------------------------- +# Stack health workflow +# --------------------------------------------------------------------------- +health_check: + bun run stack:health + +health_check_json: + bun run stack:health --json + +health_check_strict: + bun run stack:health --strict-warnings + +health_note_init: + @mkdir -p .stack-health + @NOTE=.stack-health/cleanup-note.md; \ + if [ -f $$NOTE ]; then \ + echo "Cleanup note already exists at $$NOTE"; \ + else \ + printf '# Stack Health Cleanup Note\n\n- Date: %s\n- Branch: %s\n- Commit hash: TODO\n\n## Changes\n- TODO\n\n## Validation\n- TODO\n' "$$(date -u +%Y-%m-%d)" "$(GIT_BRANCH)" > $$NOTE; \ + echo "Created $$NOTE"; \ + fi + +health_note_record_hash: + @NOTE=.stack-health/cleanup-note.md; \ + if [ ! -f $$NOTE ]; then \ + echo "Error: $$NOTE does not exist. 
Run 'make health_note_init' first."; \ + exit 1; \ + fi; \ + HASH="$${HASH:-$$(git rev-parse HEAD)}"; \ + python3 -c "from pathlib import Path; note = Path('.stack-health/cleanup-note.md'); content = note.read_text(); marker = '- Commit hash: TODO'; assert marker in content, 'Cleanup note does not contain the commit hash placeholder'; note.write_text(content.replace(marker, '- Commit hash: ' + '$$HASH', 1)); print(f'Updated {note} with commit hash ' + '$$HASH')" + +# --------------------------------------------------------------------------- +# GHCR (GitHub Container Registry) +# --------------------------------------------------------------------------- +ghcr_login: + @echo "=== Logging into GHCR via gh CLI ===" + @gh auth status >/dev/null 2>&1 || { echo "Error: gh CLI not authenticated. Run: gh auth login"; exit 1; } + @gh auth token | docker login ghcr.io -u $$(gh api user -q .login) --password-stdin + @echo "Logged into ghcr.io as $$(gh api user -q .login)" + @echo "" + @echo "If push is denied, ensure your token has write:packages scope:" + @echo " gh auth refresh -s write:packages" + +# Ensure buildx builder exists +ensure_builder: + @docker buildx inspect multi-arch-builder >/dev/null 2>&1 || docker buildx create --name multi-arch-builder --use + +# Multi-architecture build+push helper +define build_multi_arch + @make it_clean + @make ensure_builder + docker buildx build --platform linux/amd64,linux/arm64 \ + -f $(DOCKERFILE) \ + --build-arg BASE_IMAGE=$(BASE_IMAGE) \ + --build-arg BUN_VERSION=$(BUN_VERSION) \ + -t $(1):$(IMAGE_TAG) \ + -t $(1):latest \ + --push $(BUILD_CONTEXT) +endef + +# Push current image tags to GHCR +it_deploy: it_build_multi_arch_push_GHCR + +it_build_multi_arch_push_GHCR: ghcr_login + @echo "Building multi-arch and pushing to GHCR" + $(call build_multi_arch,$(GHCR_IMAGE_NAME)) + @echo "Completed GHCR multi-arch push for version $(IMAGE_TAG)" + +# --------------------------------------------------------------------------- +# Version 
/ Release (git-flow) +# --------------------------------------------------------------------------- +show_version: + @echo "Current version: $(CURRENT_VERSION)" + +bump_release_version: + @if [ -z "$(RELEASE_VERSION)" ]; then \ + echo "Error: RELEASE_VERSION not defined. Are you on a release/ or hotfix/ branch?"; \ + exit 1; \ + fi + @echo "Bumping version to $(RELEASE_VERSION)..." + @python3 -c "import json; from pathlib import Path; version = '$(RELEASE_VERSION)'.lstrip('v'); Path('VERSION').write_text(version + '\\n'); pkg_path = Path('package.json'); pkg = json.loads(pkg_path.read_text()); pkg['version'] = version; pkg_path.write_text(json.dumps(pkg, indent=2) + '\\n'); print(f'Updated VERSION and package.json to {version}')" + @echo "Version bumped to $(RELEASE_VERSION)" + +# Initial release (one-time, when no tags exist yet) +first_release: require_gitflow_next + git flow release start 0.0.1.0 + @echo "" + @echo "=== First release branch created (release/0.0.1.0) ===" + @echo "Next steps:" + @echo " 1. make bump_release_version # Update VERSION + package.json" + @echo " 2. git add VERSION package.json && git commit" + @echo " 3. make it_build_n_run # Build + smoke test" + @echo " 4. make release_and_push_GHCR # Finish release + push to GHCR" + +require_gitflow_next: + @if ! git flow version 2>/dev/null | grep -q 'git-flow-next'; then \ + echo "Error: git-flow-next required (Go rewrite). Install: brew install git-flow-next"; \ + exit 1; \ + fi + +minor_release: require_gitflow_next + @# Start a minor release with incremented minor version + git flow release start $$(awk -F'.' '{print $$1"."$$2+1".0.0"}' VERSION) + @echo "" + @echo "=== Release branch created ===" + @echo "Next steps:" + @echo " 1. make bump_release_version # Update VERSION + package.json" + @echo " 2. git add VERSION package.json && git commit" + @echo " 3. make it_build # Build Docker image" + @echo " 4. make it_run # Smoke test" + @echo " 5. 
make ghcr_login # Authenticate with GHCR" + @echo " 6. make release_and_push_GHCR # Finish release + push to GHCR" + +patch_release: require_gitflow_next + @# Start a patch release with incremented patch version + git flow release start $$(awk -F'.' '{print $$1"."$$2"."$$3+1".0"}' VERSION) + @echo "" + @echo "=== Release branch created ===" + @echo "Next steps:" + @echo " 1. make bump_release_version # Update VERSION + package.json" + @echo " 2. git add VERSION package.json && git commit" + @echo " 3. make it_build # Build Docker image" + @echo " 4. make it_run # Smoke test" + @echo " 5. make ghcr_login # Authenticate with GHCR" + @echo " 6. make release_and_push_GHCR # Finish release + push to GHCR" + +major_release: require_gitflow_next + @# Start a major release with incremented major version + git flow release start $$(awk -F'.' '{print $$1+1".0.0.0"}' VERSION) + @echo "" + @echo "=== Release branch created ===" + @echo "Next steps:" + @echo " 1. make bump_release_version # Update VERSION + package.json" + @echo " 2. git add VERSION package.json && git commit" + @echo " 3. make it_build # Build Docker image" + @echo " 4. make it_run # Smoke test" + @echo " 5. make ghcr_login # Authenticate with GHCR" + @echo " 6. make release_and_push_GHCR # Finish release + push to GHCR" + +hotfix: require_gitflow_next + @# Start a hotfix with incremented micro version (fourth component) + git flow hotfix start $$(awk -F'.' '{if (NF < 4) print $$1"."$$2"."$$3".1"; else print $$1"."$$2"."$$3"."$$4+1}' VERSION) + @echo "" + @echo "=== Hotfix branch created ===" + @echo "Next steps:" + @echo " 1. Fix the issue" + @echo " 2. make bump_release_version # Update VERSION + package.json" + @echo " 3. git add VERSION package.json && git commit # Commit fix + version bump" + @echo " 4. make it_build # Build Docker image" + @echo " 5. make it_run # Smoke test" + @echo " 6. make ghcr_login # Authenticate with GHCR" + @echo " 7. 
make hotfix_and_push_GHCR # Finish hotfix + push to GHCR" + +# Helper: clear stale git-flow merge state from a prior interrupted finish +define clear_stale_gitflow_state + if [ -f .git/gitflow/state/merge.json ] && [ ! -f .git/MERGE_HEAD ]; then \ + echo "Clearing stale git-flow merge state from prior run..."; \ + rm -f .git/gitflow/state/merge.json; \ + fi +endef + +release_finish: require_gitflow_next + @echo "=== Finishing release $(RELEASE_VERSION) ===" + @# Step 1: Clear stale git-flow state if no real merge is in progress + @$(clear_stale_gitflow_state) + @# Step 2: Try git-flow finish (--no-fetch: release branches are never pushed) + @# Step 3: If git-flow fails, do the merge/tag/cleanup manually + @git flow release finish --no-fetch || ( \ + echo ""; \ + echo "git-flow finish failed — completing release/$(RELEASE_VERSION) manually..."; \ + rm -f .git/gitflow/state/merge.json; \ + git checkout master && \ + git merge --no-ff --no-edit release/$(RELEASE_VERSION) && \ + (git tag -a "$(RELEASE_VERSION)" -m "Release $(RELEASE_VERSION)" 2>/dev/null || echo " Tag $(RELEASE_VERSION) already exists") && \ + git checkout develop && \ + git merge --no-ff --no-edit master && \ + git branch -d release/$(RELEASE_VERSION) \ + ) + @# Step 4: Push everything + @git push origin develop && git push origin master && git push --tags + @git checkout develop + @echo "" + @echo "=== Release $(RELEASE_VERSION) complete ===" + @echo "Tag: $$(git tag --sort=-v:refname | head -n 1)" + +hotfix_finish: require_gitflow_next + @echo "=== Finishing hotfix ===" + @$(clear_stale_gitflow_state) + @git flow hotfix finish --no-fetch || ( \ + echo ""; \ + echo "git-flow hotfix finish failed — completing manually..."; \ + rm -f .git/gitflow/state/merge.json; \ + HOTFIX_VER=$$(git rev-parse --abbrev-ref HEAD | sed 's/^hotfix\///'); \ + git checkout master && \ + git merge --no-ff --no-edit hotfix/$$HOTFIX_VER && \ + (git tag -a "$$HOTFIX_VER" -m "Hotfix $$HOTFIX_VER" 2>/dev/null || echo " Tag 
$$HOTFIX_VER already exists") && \ + git checkout develop && \ + git merge --no-ff --no-edit master && \ + git branch -d hotfix/$$HOTFIX_VER \ + ) + @git push origin develop && git push origin master && git push --tags + @git checkout develop + +release_and_push_GHCR: release_finish + @echo "" + @echo "=== Building and pushing to GHCR ===" + @make it_build_multi_arch_push_GHCR + @echo "" + @VTAG=$$(git tag --sort=-v:refname | sed 's/^v//' | head -n 1); \ + echo "=== Release $$VTAG published ==="; \ + echo "Verify: docker pull $(GHCR_IMAGE_NAME):$$VTAG"; \ + echo "Verify: docker pull $(GHCR_IMAGE_NAME):latest" + +hotfix_and_push_GHCR: hotfix_finish + @echo "" + @echo "=== Building and pushing to GHCR ===" + @make it_build_multi_arch_push_GHCR + @echo "" + @VTAG=$$(git tag --sort=-v:refname | sed 's/^v//' | head -n 1); \ + echo "=== Hotfix $$VTAG published ==="; \ + echo "Verify: docker pull $(GHCR_IMAGE_NAME):$$VTAG"; \ + echo "Verify: docker pull $(GHCR_IMAGE_NAME):latest" + +.PHONY: release help it_stop it_clean it_gone \ + it_build it_build_no_cache it_run it_run_dev it_run_ghcr \ + it_explore it_claude \ + it_build_n_run it_build_n_run_no_cache it_build_n_test_fresh \ + it_deploy ghcr_login ensure_builder it_build_multi_arch_push_GHCR \ + health_check health_check_json health_check_strict health_note_init health_note_record_hash \ + show_version bump_release_version first_release require_gitflow_next \ + minor_release patch_release major_release hotfix \ + release_finish hotfix_finish \ + release_and_push_GHCR hotfix_and_push_GHCR \ + test test_fresh security_check + +# --------------------------------------------------------------------------- +# Interactive release entrypoint +# --------------------------------------------------------------------------- +release: + @echo "Choose a release target: make minor_release | make patch_release | make major_release | make hotfix" diff --git a/README.md b/README.md index 14147a293..835371530 100644 --- a/README.md +++ 
b/README.md @@ -1,4 +1,6 @@ -# gstack +# s-gstack + +> launch and work safely in a container right now > "I don't think I've typed like a line of code probably since December, basically, which is an extremely large change." — [Andrej Karpathy](https://fortune.com/2026/03/21/andrej-karpathy-openai-cofounder-ai-agents-coding-state-of-psychosis-openclaw/), No Priors podcast, March 2026 @@ -246,12 +248,118 @@ I open sourced how I build software. You can fork it and make it your own. | Doc | What it covers | |-----|---------------| | [Skill Deep Dives](docs/skills.md) | Philosophy, examples, and workflow for every skill (includes Greptile integration) | +| [Stack Health Harness](docs/stack-health-harness.md) | Reusable health scanner for telemetry, manipulation-pattern, and integrity checks | +| [Fork About](ABOUT.md) | Neutral fork description for a health-hardened, reusable agent-pack audit distribution | | [Builder Ethos](ETHOS.md) | Builder philosophy: Boil the Lake, Search Before Building, three layers of knowledge | | [Architecture](ARCHITECTURE.md) | Design decisions and system internals | | [Browser Reference](BROWSER.md) | Full command reference for `/browse` | | [Contributing](CONTRIBUTING.md) | Dev setup, testing, contributor mode, and dev mode | | [Changelog](CHANGELOG.md) | What's new in every version | +## Stack Health + +gstack now includes a reusable stack-health harness for catching both hard integrity regressions and softer policy debt. + +- `bun run stack:health` runs the full profile. +- `bun run stack:health --json` emits machine-readable output. +- `bun run stack:health --strict-warnings` fails on warnings as well as errors. +- `bun run stack:health --root /path/to/other-pack` audits another skill, tool, or agent pack. +- `make health_check` runs the same profile through the repo-standard Makefile. +- `make health_check_strict` turns warnings into a failing build gate. 
+- `make health_note_init` creates a cleanup note with a commit-hash placeholder. +- `make health_note_record_hash` fills that placeholder from `git rev-parse HEAD` or `HASH=...`. + +The harness currently checks: + +- generated skill freshness and touchfile consistency +- audit invariants already enforced elsewhere in the repo +- outbound telemetry and hosted-egress surfaces +- founder funneling, authority conversion, and identity-expansion copy +- mania-adjacent productivity hype, coercive urgency, and dependency-building language + +For the repo security gate specifically: + +- `bun run security:check` runs the focused audit-compliance suite plus the stack-health audit gate. +- `make security_check` runs the same policy locally before you push. + +The full rule profile and fixture-backed manipulation-policy tests are documented in [docs/stack-health-harness.md](docs/stack-health-harness.md). + +## Containers & GHCR + +The root `Makefile` now keeps the standard org release/GHCR flow, but retargets it to gstack's Bun and Playwright runtime. + +- `make it_build` builds the runtime image from the root `Dockerfile`. +- `make it_run` smoke-tests the image with `bun run skill:check`. +- `make it_explore` opens an interactive `-it` shell with the current repo bind-mounted into `/workspace`. +- `make it_claude` jumps straight into `claude` inside that same interactive container environment. +- `make it_run_dev` is kept as an alias for `make it_explore`. +- `make it_build_multi_arch_push_GHCR` publishes the runtime image to `ghcr.io/<org>/<image>`. +- `make minor_release`, `make patch_release`, `make major_release`, and `make hotfix` preserve the existing git-flow release entrypoints. 
+ +The Docker build accepts an overridable base image, so the team can swap in its standard container without rewriting the file: + +- `make it_build BASE_IMAGE=ghcr.io/your-org/your-ai-base:latest` + +Version bumps in the Makefile now update both `VERSION` and `package.json`, which keeps container tags and Bun package metadata aligned. + +The runtime image also installs `claude-code` with Bun, so interactive exploration containers have both `bun` and `claude` available out of the box. + +If you want to override the Claude entry command, pass `CLAUDE_COMMAND`, for example: + +- `make it_claude CLAUDE_COMMAND='claude --help'` + +Auth handling is runtime-only: + +- `.env` is the primary auth source for `make it_run`, `make it_run_dev`, and `make test_fresh`. +- If `.env` is missing, selected env vars from the caller or cloud harness are forwarded into the container by name. +- No auth values are passed as Docker build args or baked into image layers. +- The default container path is isolated: it uses container-owned persisted volumes for Claude/Codex and other home-directory state, so it does not touch your personal `~/.claude` or `~/.codex`. +- If you explicitly want to reuse host account login state, set `ENABLE_ACCOUNT_MOUNTS=1`. +- If you need to run without mounted home-directory state, put the account-backed Anthropic session token in `.env` as `ANTHROPIC_AUTH_TOKEN`; treat it like any other secret and rotate it when the upstream login changes. +- Leave `ENABLE_ACCOUNT_MOUNTS=0` for the safer default that avoids polluting personal host state. + +Persistence across container restarts works in two layers: + +- The repo itself is bind-mounted into `/workspace`, so dotfiles inside the repo continue to live on the host checkout. +- Named Docker volumes back `/root/.claude` and `/root/.codex` by default, so container auth/session state survives restarts without writing into your personal host directories. 
+- If you opt in with `ENABLE_ACCOUNT_MOUNTS=1`, host `~/.claude` and `~/.codex` are mounted instead. +- Additional container-side state in `/root/.config`, `/root/.cache`, and `/root/.local/share` is stored in named Docker volumes by default, so CLI/session state survives container teardown and recreation. +- Set `ENABLE_STATE_VOLUMES=0` if you want fully ephemeral container-side state. + +The same rule applies to other services: + +- If a service has a CLI with persistent login state, mount that config directory into the container instead of baking or copying credentials. +- If a service only supports env-based auth, keep its secret in `.env` locally or inject it from your cloud harness when `.env` is absent. +- Extend `ENV_PASSTHROUGH_VARS` and add the mount in the `Makefile` when you add a new provider-specific CLI. + +### Use The Published Image Without Cloning gstack + +You do not need a full checkout of this repo to use the published container. Pull the image, mount the project you want to work on into `/workspace`, and inject auth at runtime. + +```bash +docker pull ghcr.io/opencoca/gstack:latest + +docker run --rm -it \ + --env-file .env \ + -v "$PWD":/workspace \ + -v gstack-claude:/root/.claude \ + -v gstack-codex:/root/.codex \ + -v gstack-config:/root/.config \ + -v gstack-cache:/root/.cache \ + -v gstack-local-share:/root/.local/share \ + -w /workspace \ + ghcr.io/opencoca/gstack:latest \ + bash -lc 'claude' +``` + +Use the same pattern for `bun`, `claude auth status`, or any other runtime command. The image only needs the repo or project you actually want to operate on mounted into `/workspace`. + +Keep local-only state out of git: + +- Do not commit or plain-git sync `.env`, auth tokens, Claude/Codex state, or Docker volume contents. +- Treat `/root/.claude`, `/root/.codex`, `/root/.config`, `/root/.cache`, and `/root/.local/share` as machine-local state even when they persist across container restarts. 
+- If you need to move secrets or local agent state between machines, use an encrypted secret manager, encrypted git workflow, Syncthing, or another local-only transport instead of this repo. + ## Privacy & Telemetry gstack includes **opt-in** usage telemetry to help improve the project. Here's exactly what happens: @@ -262,6 +370,12 @@ gstack includes **opt-in** usage telemetry to help improve the project. Here's e - **What's never sent:** code, file paths, repo names, branch names, prompts, or any user-generated content. - **Change anytime:** `gstack-config set telemetry off` disables everything instantly. +For a stronger one-switch privacy mode that disables hosted telemetry sync, remote update checks, and community dashboard network calls, set: + +- `gstack-config set network_egress off` + +This keeps local-only analytics available while shutting off the hosted egress paths. + Data is stored in [Supabase](https://supabase.com) (open source Firebase alternative). The schema is in [`supabase/migrations/`](supabase/migrations/) — you can verify exactly what's collected. The Supabase publishable key in the repo is a public key (like a Firebase API key) — row-level security policies deny all direct access. Telemetry flows through validated edge functions that enforce schema checks, event type allowlists, and field length limits. **Local analytics are always available.** Run `gstack-analytics` to see your personal usage dashboard from the local JSONL file — no remote data needed. diff --git a/TODOS.md b/TODOS.md index a82a7826a..20da0fea7 100644 --- a/TODOS.md +++ b/TODOS.md @@ -1,5 +1,36 @@ # TODOS +## Current Focus + +### Use and human testing + +**What:** Prioritize real usage, dogfooding, and human testing over more platform hardening work for the next stretch. + +**Why:** The next bottleneck is not another layer of repo policy. 
It is whether people can actually use this comfortably, whether workflows hold up under real sessions, and where human testers still hit friction. + +**Context:** Security and local-state guardrails have been tightened enough for now: runtime-only auth guidance is documented, local-only state is ignored, Docker build context excludes secrets, CI has a dedicated security policy gate, and Bun install in Docker is checksum-verified. The next useful signal should come from real users and human testing sessions, not more speculative infra work. + +**Effort:** Ongoing +**Priority:** P0 + +## Future planning + +### Deferred security follow-ups + +**What:** Keep the next security layer planned, but explicitly defer implementation until after the current use and human-testing push. + +**Why:** These are worthwhile follow-ups, but they are no longer the immediate bottleneck. + +**Context:** Future items to revisit: + +- current-tree and git-history secret scan command wired into the security gate +- dependency and supply-chain audit pass in CI +- sensitive-path diff checks for workflows, Dockerfiles, telemetry scripts, and publish surfaces +- `CODEOWNERS` once the intended maintainer handles are known + +**Effort:** M +**Priority:** P2 + ## Sidebar Security ### ML Prompt Injection Classifier diff --git a/VERSION b/VERSION index dd35abbb6..0866607f2 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.15.1.0 +0.0.1.0 diff --git a/bin/gstack-community-dashboard b/bin/gstack-community-dashboard index 1f469283d..0512dbba7 100755 --- a/bin/gstack-community-dashboard +++ b/bin/gstack-community-dashboard @@ -11,6 +11,20 @@ set -uo pipefail GSTACK_DIR="${GSTACK_DIR:-$(cd "$(dirname "$0")/.." 
&& pwd)}" +CONFIG_CMD="$GSTACK_DIR/bin/gstack-config" + +NETWORK_EGRESS="$($CONFIG_CMD get network_egress 2>/dev/null || true)" +case "${NETWORK_EGRESS:-on}" in + off|false|disabled) + echo "gstack community dashboard" + echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + echo "" + echo "Network egress is disabled. Community telemetry is unavailable." + echo "" + echo "For local analytics, run: gstack-analytics" + exit 0 + ;; +esac # Source Supabase config if not overridden by env if [ -z "${GSTACK_SUPABASE_URL:-}" ] && [ -f "$GSTACK_DIR/supabase/config.sh" ]; then diff --git a/bin/gstack-config b/bin/gstack-config index c118a322a..b663c307c 100755 --- a/bin/gstack-config +++ b/bin/gstack-config @@ -29,6 +29,8 @@ CONFIG_HEADER='# gstack configuration — edit freely, changes take effect on ne # # off — no data sent, no local analytics # # anonymous — counter only, no device ID # # community — usage data + stable device ID +# network_egress: on # off = disable hosted telemetry sync, remote update +# # checks, and community dashboard network calls # # ─── Updates ───────────────────────────────────────────────────────── # auto_upgrade: false # true = silently upgrade on session start diff --git a/bin/gstack-telemetry-log b/bin/gstack-telemetry-log index 93db82077..3bbd751af 100755 --- a/bin/gstack-telemetry-log +++ b/bin/gstack-telemetry-log @@ -66,6 +66,12 @@ case "$TIER" in *) TIER="off" ;; # invalid value → default to off esac +NETWORK_EGRESS="$($CONFIG_CMD get network_egress 2>/dev/null || true)" +case "${NETWORK_EGRESS:-on}" in + off|false|disabled) NETWORK_EGRESS="off" ;; + *) NETWORK_EGRESS="on" ;; +esac + if [ "$TIER" = "off" ]; then # Still clear pending markers for this session even if telemetry is off [ -n "$SESSION_ID" ] && rm -f "$PENDING_DIR/.pending-$SESSION_ID" 2>/dev/null || true @@ -196,7 +202,7 @@ printf '{"v":1,"ts":"%s","event_type":"%s","skill":"%s","session_id":"%s","gstac # ─── Trigger sync if tier is not off ───────────────────────── 
SYNC_CMD="$GSTACK_DIR/bin/gstack-telemetry-sync" -if [ -x "$SYNC_CMD" ]; then +if [ "$NETWORK_EGRESS" != "off" ] && [ -x "$SYNC_CMD" ]; then "$SYNC_CMD" 2>/dev/null & fi diff --git a/bin/gstack-telemetry-sync b/bin/gstack-telemetry-sync index be767c23e..8f44cc1c1 100755 --- a/bin/gstack-telemetry-sync +++ b/bin/gstack-telemetry-sync @@ -44,6 +44,11 @@ TIER="$("$CONFIG_CMD" get telemetry 2>/dev/null || true)" TIER="${TIER:-off}" [ "$TIER" = "off" ] && exit 0 +NETWORK_EGRESS="$($CONFIG_CMD get network_egress 2>/dev/null || true)" +case "${NETWORK_EGRESS:-on}" in + off|false|disabled) exit 0 ;; +esac + # ─── Read cursor ───────────────────────────────────────────── CURSOR=0 if [ -f "$CURSOR_FILE" ]; then diff --git a/bin/gstack-update-check b/bin/gstack-update-check index 31e9fdb6f..a197d69f1 100755 --- a/bin/gstack-update-check +++ b/bin/gstack-update-check @@ -32,6 +32,13 @@ if [ "$_UC" = "false" ]; then exit 0 fi +_NET=$("$GSTACK_DIR/bin/gstack-config" get network_egress 2>/dev/null || true) +case "${_NET:-on}" in + off|false|disabled) + exit 0 + ;; +esac + # ─── Migration: fix stale Codex descriptions (one-time) ─────── # Existing installs may have .agents/skills/gstack/SKILL.md with oversized # descriptions (>1024 chars) that Codex rejects. 
We can't regenerate from diff --git a/browse/test/gstack-config.test.ts b/browse/test/gstack-config.test.ts index a00af6096..6b7f79c24 100644 --- a/browse/test/gstack-config.test.ts +++ b/browse/test/gstack-config.test.ts @@ -144,6 +144,7 @@ describe('gstack-config', () => { expect(content).toContain('edit freely'); expect(content).toContain('proactive:'); expect(content).toContain('telemetry:'); + expect(content).toContain('network_egress:'); expect(content).toContain('auto_upgrade:'); expect(content).toContain('skill_prefix:'); expect(content).toContain('routing_declined:'); @@ -167,6 +168,12 @@ describe('gstack-config', () => { expect(stdout).toBe('community'); }); + test('network_egress can be set and read', () => { + run(['set', 'network_egress', 'off']); + const { stdout } = run(['get', 'network_egress']); + expect(stdout).toBe('off'); + }); + test('existing config file is not overwritten with header', () => { writeFileSync(join(stateDir, 'config.yaml'), 'existing: value\n'); run(['set', 'new_key', 'new_value']); diff --git a/browse/test/gstack-update-check.test.ts b/browse/test/gstack-update-check.test.ts index 47300f0a6..abd16050e 100644 --- a/browse/test/gstack-update-check.test.ts +++ b/browse/test/gstack-update-check.test.ts @@ -432,6 +432,17 @@ describe('gstack-update-check', () => { expect(existsSync(join(stateDir, 'last-update-check'))).toBe(false); }); + test('network_egress: off disables all remote update checks', () => { + writeFileSync(join(gstackDir, 'VERSION'), '0.3.3\n'); + writeFileSync(join(gstackDir, 'REMOTE_VERSION'), '0.4.0\n'); + writeConfig('network_egress: off\n'); + + const { exitCode, stdout } = run(); + expect(exitCode).toBe(0); + expect(stdout).toBe(''); + expect(existsSync(join(stateDir, 'last-update-check'))).toBe(false); + }); + test('missing config.yaml does not crash', () => { writeFileSync(join(gstackDir, 'VERSION'), '0.3.3\n'); writeFileSync(join(gstackDir, 'REMOTE_VERSION'), '0.4.0\n'); diff --git 
a/docs/examples/agent-pack-health.config.json b/docs/examples/agent-pack-health.config.json new file mode 100644 index 000000000..d1938e5fd --- /dev/null +++ b/docs/examples/agent-pack-health.config.json @@ -0,0 +1,88 @@ +{ + "version": 1, + "defaultIgnores": [ + ".git/**", + "node_modules/**", + "dist/**", + "build/**", + "coverage/**" + ], + "checks": [ + { + "id": "pack-doc-freshness", + "type": "pattern", + "description": "Generated or packaged prompt artifacts should not drift silently from source templates.", + "severity": "warn", + "include": [ + "**/*.prompt.md", + "**/*.instructions.md", + "**/*.agent.md", + "**/SKILL.md" + ], + "matchers": [ + { + "literal": "AUTO-GENERATED", + "message": "Generated artifact detected: make sure its source and regeneration command are covered elsewhere." + } + ] + }, + { + "id": "outbound-egress-surfaces", + "type": "pattern", + "description": "Find runtime references to hosted telemetry, analytics, or external service calls.", + "severity": "warn", + "include": [ + "**/*.sh", + "**/*.ts", + "**/*.js", + "**/*.md" + ], + "matchers": [ + { + "regex": "telemetry|analytics|segment|mixpanel|posthog|supabase", + "flags": "i", + "message": "Potential outbound telemetry or analytics surface is present." + }, + { + "regex": "curl .*https?://|fetch\\(|axios\\(|XMLHttpRequest", + "flags": "i", + "message": "Potential outbound network path is present." + } + ] + }, + { + "id": "manipulative-copy", + "type": "pattern", + "description": "Find authority-conversion, urgency, shame, surveillance, or dependency-building language.", + "severity": "warn", + "include": [ + "**/*.md", + "**/*.tmpl", + "**/*.ts", + "**/*.js" + ], + "matchers": [ + { + "regex": "apply now|don'?t be left behind|if you'?re serious|prove you belong", + "flags": "i", + "message": "Urgency or shame-based persuasion is present." 
+ }, + { + "regex": "exactly the kind of|wants to fund|come work at|special kind of builder", + "flags": "i", + "message": "Authority-conversion or status-signaling language is present." + }, + { + "regex": "in the background|without consent|without asking|monitor usage|track sessions", + "flags": "i", + "message": "Hidden collection or surveillance language is present." + }, + { + "regex": "tell you exactly what to do|need a partner who knows your business deeply|push you every week", + "flags": "i", + "message": "Dependency-building or obedience-oriented language is present." + } + ] + } + ] +} diff --git a/docs/stack-health-harness.md b/docs/stack-health-harness.md new file mode 100644 index 000000000..d514aa35e --- /dev/null +++ b/docs/stack-health-harness.md @@ -0,0 +1,130 @@ +# Stack Health Harness + +`stack-health` is a config-driven health scanner for agent-heavy repositories. + +It is designed to audit any pack that ships prompts, instructions, skills, agents, +shell tools, helper scripts, or browser automation glue — not only gstack. + +It has two jobs: +- run hard integrity checks that should fail fast now, +- report softer policy debt that you can ratchet from warning to error as cleanup lands. + +## Run It + +```bash +bun run stack:health +bun run stack:health --json +bun run stack:health --strict-warnings +bun run stack:health --only generated-output-freshness,touchfile-consistency +bun run stack:health --root /path/to/other-pack +bun run stack:health --root /path/to/other-pack --config configs/pack-health.json +``` + +## Check Types + +### Command checks + +Use these for invariants that already have a test or a dry-run mode. +Examples: +- generated-file freshness +- touchfile completeness +- audit compliance + +### Pattern checks + +Use these for source scans that are easier to express as policy patterns. 
+Examples: +- outbound telemetry surfaces +- funnel or authority-conversion copy +- mania-adjacent productivity framing + +## Config Model + +The harness reads `stack-health.config.json`. + +Each check has: +- `id`: stable handle for `--only` +- `type`: `command` or `pattern` +- `description`: human-readable purpose +- `severity`: `error` or `warn` + +Pattern checks also define: +- `include`: globs to scan +- `exclude`: optional globs to skip +- `matchers`: list of forbidden literals or regexes with finding messages + +Manipulation-policy rules should also have fixture coverage in +`test/fixtures/stack-health-policy-fixtures.json` so the scanner is tested +against deliberate positive and negative examples, not only live repo text. + +Command checks also define: +- `command` +- `args` +- optional `cwd` +- optional `timeoutMs` + +## Reuse Across Packs + +The portable part is the pair: +- `scripts/stack-health.ts` +- `stack-health.config.json` + +For runtime hardening, pair the harness with the repo's one-switch no-egress setting: + +- `gstack-config set network_egress off` + +That disables hosted telemetry sync, remote update checks, and community dashboard network calls while keeping local-only analytics and local checks available. + +You have two reuse modes: + +1. **Portable copy** +Copy the script and config into another repo. + +2. **Central auditor** +Run this harness against another pack from one maintained checkout: + +```bash +bun run stack:health --root /path/to/pack --config /path/to/pack/stack-health.config.json +``` + +To apply this to another skill, tool, or agent pack: +1. point the harness at the target root with `--root`, +2. use that pack's own config with `--config`, +3. replace command checks with the pack's real integrity commands, +4. replace pattern rules with the pack's own dark-pattern, telemetry, and autonomy-risk signatures, +5. start with `warn` for known debt and ratchet to `error` as the pack is cleaned. 
+ +## Ratchet Strategy + +A practical rollout looks like this: +1. hard-fail existing integrity checks immediately, +2. report policy debt as warnings, +3. fix one category at a time, +4. promote a warning rule to `error` once the repo is clean enough to enforce it. + +That keeps the harness useful on day one without pretending the repo is already healthy. + +## Manipulation Coverage + +The current policy fixture corpus covers these families: +- founder and authority funneling +- mania-adjacent productivity hype +- coercive urgency and shame framing +- surveillance and hidden-consent language +- dependency and compliance-building copy + +If you add a new manipulation family, add both: +- a config rule in `stack-health.config.json` +- positive and negative examples in `test/fixtures/stack-health-policy-fixtures.json` + +## Pack Categories + +The harness is intended to cover all of these: +- skill packs +- agent packs +- prompt packs +- tool bundles and helper CLIs +- shell hooks and safety scripts +- browser automation helpers + +If a pack ships behavior that can influence users or move data off-machine, it should be auditable here. diff --git a/package.json b/package.json index 50ec09145..f400192cd 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "gstack", - "version": "0.15.0.0", - "description": "Garry's Stack — Claude Code skills + fast headless browser. One repo, one install, entire AI engineering workflow.", + "version": "0.0.1.0", + "description": "Garry's Stack \u2014 Claude Code skills + fast headless browser. 
One repo, one install, entire AI engineering workflow.", "license": "MIT", "type": "module", "bin": { @@ -11,6 +11,10 @@ "build": "bun run gen:skill-docs --host all; bun build --compile browse/src/cli.ts --outfile browse/dist/browse && bun build --compile browse/src/find-browse.ts --outfile browse/dist/find-browse && bun build --compile design/src/cli.ts --outfile design/dist/design && bun build --compile bin/gstack-global-discover.ts --outfile bin/gstack-global-discover && bash browse/scripts/build-node-server.sh && git rev-parse HEAD > browse/dist/.version && git rev-parse HEAD > design/dist/.version && chmod +x browse/dist/browse browse/dist/find-browse design/dist/design bin/gstack-global-discover && rm -f .*.bun-build || true", "dev:design": "bun run design/src/cli.ts", "gen:skill-docs": "bun run scripts/gen-skill-docs.ts", + "stack:health": "bun run scripts/stack-health.ts", + "stack:health:json": "bun run scripts/stack-health.ts --json", + "stack:health:strict": "bun run scripts/stack-health.ts --strict-warnings", + "security:check": "bun test test/audit-compliance.test.ts && bun run stack:health --only audit-compliance", "dev": "bun run browse/src/cli.ts", "server": "bun run browse/src/server.ts", "test": "bun test browse/test/ test/ --ignore 'test/skill-e2e-*.test.ts' --ignore test/skill-llm-eval.test.ts --ignore test/skill-routing-e2e.test.ts --ignore test/codex-e2e.test.ts --ignore test/gemini-e2e.test.ts", diff --git a/scripts/stack-health.ts b/scripts/stack-health.ts new file mode 100644 index 000000000..01116efd9 --- /dev/null +++ b/scripts/stack-health.ts @@ -0,0 +1,384 @@ +#!/usr/bin/env bun + +import * as fs from 'fs'; +import * as path from 'path'; +import { spawnSync } from 'child_process'; + +export type Severity = 'error' | 'warn'; + +export interface PatternMatcher { + message: string; + literal?: string; + regex?: string; + flags?: string; +} + +export interface PatternCheck { + id: string; + type: 'pattern'; + description: string; + 
severity: Severity; + include: string[]; + exclude?: string[]; + matchers: PatternMatcher[]; +} + +export interface CommandCheck { + id: string; + type: 'command'; + description: string; + severity: Severity; + command: string; + args?: string[]; + cwd?: string; + timeoutMs?: number; +} + +export type HealthCheck = PatternCheck | CommandCheck; + +export interface HarnessConfig { + version: 1; + defaultIgnores?: string[]; + checks: HealthCheck[]; +} + +export interface Finding { + checkId: string; + severity: Severity; + message: string; + file?: string; + line?: number; + column?: number; + snippet?: string; + details?: string; +} + +export interface CheckResult { + check: HealthCheck; + findings: Finding[]; +} + +export interface HarnessRunResult { + results: CheckResult[]; + errorCount: number; + warningCount: number; +} + +const REPO_ROOT = path.resolve(import.meta.dir, '..'); +const DEFAULT_CONFIG_NAME = 'stack-health.config.json'; +const BUILTIN_IGNORES = [ + '.git/**', + 'node_modules/**', + 'browse/dist/**', + 'design/dist/**', + '.agents/**', + '.factory/**', + 'coverage/**', +]; + +export function matchGlob(file: string, pattern: string): boolean { + const normalizedFile = normalizeRel(file); + const normalizedPattern = normalizeRel(pattern); + const regexStr = normalizedPattern + .replace(/\./g, '\\.') + .replace(/\*\*/g, '{{GLOBSTAR}}') + .replace(/\*/g, '[^/]*') + .replace(/\{\{GLOBSTAR\}\}/g, '.*'); + return new RegExp(`^${regexStr}$`).test(normalizedFile); +} + +function normalizeRel(value: string): string { + return value.replaceAll(path.sep, '/'); +} + +export function listRepoFiles(root: string, ignorePatterns: string[]): string[] { + const results: string[] = []; + + function walk(currentDir: string): void { + for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) { + const fullPath = path.join(currentDir, entry.name); + const relPath = normalizeRel(path.relative(root, fullPath)); + if (shouldIgnore(relPath, ignorePatterns)) { + 
continue; + } + if (entry.isDirectory()) { + walk(fullPath); + continue; + } + if (entry.isFile()) { + results.push(relPath); + } + } + } + + walk(root); + return results.sort(); +} + +function isProbablyBinary(buffer: Buffer): boolean { + const sampleSize = Math.min(buffer.length, 4096); + let suspiciousBytes = 0; + + for (let index = 0; index < sampleSize; index++) { + const value = buffer[index]; + if (value === 0) { + return true; + } + const isControl = value < 7 || (value > 14 && value < 32); + if (isControl) { + suspiciousBytes++; + } + } + + return sampleSize > 0 && suspiciousBytes / sampleSize > 0.1; +} + +function shouldIgnore(relPath: string, patterns: string[]): boolean { + return patterns.some(pattern => matchGlob(relPath, pattern)); +} + +function compactOutput(output: string): string { + return output + .split('\n') + .map(line => line.trim()) + .filter(Boolean) + .slice(0, 12) + .join('\n'); +} + +function findLiteralColumn(line: string, literal: string): number { + return line.indexOf(literal) + 1; +} + +function buildMatcherRegex(matcher: PatternMatcher): RegExp { + if (matcher.regex) { + return new RegExp(matcher.regex, matcher.flags ?? ''); + } + return new RegExp(escapeRegex(matcher.literal ?? '')); +} + +function escapeRegex(value: string): string { + return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); +} + +function fileMatches(relPath: string, include: string[], exclude: string[] | undefined): boolean { + const included = include.some(pattern => matchGlob(relPath, pattern)); + if (!included) { + return false; + } + return !(exclude ?? 
[]).some(pattern => matchGlob(relPath, pattern)); +} + +export function findPatternFindingsInContent( + check: PatternCheck, + content: string, + file = 'inline', +): Finding[] { + const findings: Finding[] = []; + const lines = content.split('\n'); + + for (const matcher of check.matchers) { + const regex = buildMatcherRegex(matcher); + for (let index = 0; index < lines.length; index++) { + const line = lines[index]; + regex.lastIndex = 0; + const match = regex.exec(line); + if (!match) { + continue; + } + const column = matcher.literal + ? findLiteralColumn(line, matcher.literal) + : (match.index ?? 0) + 1; + findings.push({ + checkId: check.id, + severity: check.severity, + message: matcher.message, + file, + line: index + 1, + column, + snippet: line.trim(), + }); + } + } + + return findings; +} + +export function runPatternCheck(root: string, check: PatternCheck, defaultIgnores: string[]): CheckResult { + const files = listRepoFiles(root, [...BUILTIN_IGNORES, ...defaultIgnores]); + const findings: Finding[] = []; + + for (const file of files) { + if (!fileMatches(file, check.include, check.exclude)) { + continue; + } + const fileBuffer = fs.readFileSync(path.join(root, file)); + if (isProbablyBinary(fileBuffer)) { + continue; + } + const content = fileBuffer.toString('utf-8'); + findings.push(...findPatternFindingsInContent(check, content, file)); + } + + return { check, findings }; +} + +export function runCommandCheck(root: string, check: CommandCheck): CheckResult { + const cwd = check.cwd ? path.resolve(root, check.cwd) : root; + const result = spawnSync(check.command, check.args ?? [], { + cwd, + encoding: 'utf-8', + timeout: check.timeoutMs ?? 120000, + }); + + if (result.status === 0) { + return { check, findings: [] }; + } + + const stdout = compactOutput(result.stdout ?? ''); + const stderr = compactOutput(result.stderr ?? 
''); + const details = [stdout, stderr].filter(Boolean).join('\n'); + + return { + check, + findings: [{ + checkId: check.id, + severity: check.severity, + message: `${check.command} ${(check.args ?? []).join(' ')} failed`, + details, + }], + }; +} + +export function loadHarnessConfig(configPath: string): HarnessConfig { + const content = fs.readFileSync(configPath, 'utf-8'); + const parsed = JSON.parse(content) as HarnessConfig; + if (parsed.version !== 1) { + throw new Error(`Unsupported stack-health config version: ${parsed.version}`); + } + if (!Array.isArray(parsed.checks) || parsed.checks.length === 0) { + throw new Error('stack-health config must declare at least one check'); + } + return parsed; +} + +export function runHarness(root: string, config: HarnessConfig, onlyIds: Set<string> | null = null): HarnessRunResult { + const results: CheckResult[] = []; + let errorCount = 0; + let warningCount = 0; + + for (const check of config.checks) { + if (onlyIds && !onlyIds.has(check.id)) { + continue; + } + + const result = check.type === 'pattern' + ? runPatternCheck(root, check, config.defaultIgnores ?? 
[]) + : runCommandCheck(root, check); + + for (const finding of result.findings) { + if (finding.severity === 'error') { + errorCount++; + } else { + warningCount++; + } + } + + results.push(result); + } + + return { results, errorCount, warningCount }; +} + +function parseArgs(argv: string[]): { rootPath: string; configPath: string; json: boolean; strictWarnings: boolean; onlyIds: Set | null } { + let rootPath = REPO_ROOT; + let configPathArg: string | null = null; + let json = false; + let strictWarnings = false; + let onlyIds: Set | null = null; + + for (let index = 0; index < argv.length; index++) { + const arg = argv[index]; + if (arg === '--json') { + json = true; + continue; + } + if (arg === '--strict-warnings') { + strictWarnings = true; + continue; + } + if (arg === '--root') { + rootPath = path.resolve(argv[index + 1]); + index++; + continue; + } + if (arg === '--config') { + configPathArg = argv[index + 1]; + index++; + continue; + } + if (arg === '--only') { + onlyIds = new Set(argv[index + 1].split(',').map(value => value.trim()).filter(Boolean)); + index++; + } + } + + const configPath = configPathArg + ? (path.isAbsolute(configPathArg) ? configPathArg : path.resolve(rootPath, configPathArg)) + : path.join(rootPath, DEFAULT_CONFIG_NAME); + + return { rootPath, configPath, json, strictWarnings, onlyIds }; +} + +function printHumanReport(rootPath: string, run: HarnessRunResult): void { + console.log('Stack Health Harness'); + console.log(`Target: ${rootPath}`); + console.log(''); + + for (const result of run.results) { + if (result.findings.length === 0) { + console.log(`PASS ${result.check.id} — ${result.check.description}`); + continue; + } + + const highestSeverity = result.findings.some(finding => finding.severity === 'error') ? 
'ERROR' : 'WARN'; + console.log(`${highestSeverity} ${result.check.id} — ${result.check.description}`); + for (const finding of result.findings.slice(0, 20)) { + if (finding.file) { + const location = `${finding.file}:${finding.line}:${finding.column}`; + console.log(` - ${location} ${finding.message}`); + if (finding.snippet) { + console.log(` ${finding.snippet}`); + } + } else { + console.log(` - ${finding.message}`); + if (finding.details) { + for (const line of finding.details.split('\n')) { + console.log(` ${line}`); + } + } + } + } + if (result.findings.length > 20) { + console.log(` ... ${result.findings.length - 20} more findings`); + } + } + + console.log(''); + console.log(`Summary: ${run.errorCount} errors, ${run.warningCount} warnings across ${run.results.length} checks`); +} + +if (import.meta.main) { + const args = parseArgs(process.argv.slice(2)); + const config = loadHarnessConfig(args.configPath); + const run = runHarness(args.rootPath, config, args.onlyIds); + + if (args.json) { + console.log(JSON.stringify(run, null, 2)); + } else { + printHumanReport(args.rootPath, run); + } + + const shouldFail = run.errorCount > 0 || (args.strictWarnings && run.warningCount > 0); + process.exit(shouldFail ? 
1 : 0); +} diff --git a/stack-health.config.json b/stack-health.config.json new file mode 100644 index 000000000..1d12f3c41 --- /dev/null +++ b/stack-health.config.json @@ -0,0 +1,249 @@ +{ + "version": 1, + "defaultIgnores": [ + "test/**", + "browse/test/**", + "design/test/**", + "docs/**" + ], + "checks": [ + { + "id": "generated-output-freshness", + "type": "command", + "description": "Generated SKILL.md outputs must stay in sync with their templates.", + "severity": "error", + "command": "bun", + "args": ["run", "scripts/gen-skill-docs.ts", "--dry-run"] + }, + { + "id": "touchfile-consistency", + "type": "command", + "description": "Touchfile and tier maps must remain complete and synchronized.", + "severity": "error", + "command": "bun", + "args": ["test", "test/touchfiles.test.ts"] + }, + { + "id": "audit-compliance", + "type": "command", + "description": "Existing audit invariants must continue to pass.", + "severity": "error", + "command": "bun", + "args": ["run", "test:audit"] + }, + { + "id": "telemetry-and-egress-surfaces", + "type": "pattern", + "description": "Report telemetry, Supabase, and remote community/update surfaces that still need removal or containment.", + "severity": "warn", + "include": [ + "README.md", + "ETHOS.md", + "bin/**", + "scripts/resolvers/**", + "SKILL.md.tmpl", + "*/SKILL.md.tmpl" + ], + "matchers": [ + { + "literal": "gstack-telemetry-log", + "message": "Telemetry logging hook still exists in a shipped source path." + }, + { + "literal": "community-pulse", + "message": "Hosted community dashboard endpoint is still referenced." + }, + { + "regex": "GSTACK_SUPABASE_(URL|ANON_KEY)", + "flags": "i", + "message": "Supabase runtime configuration is still referenced." + }, + { + "regex": "functions/v1/(community-pulse|telemetry-ingest)", + "flags": "i", + "message": "Hosted function endpoint is still referenced." + }, + { + "literal": "opt-in usage telemetry", + "message": "Telemetry marketing copy is still present." 
+ }, + { + "literal": "gstack-config set telemetry community", + "message": "Telemetry opt-in path is still present." + } + ] + }, + { + "id": "manipulative-founder-funnel-copy", + "type": "pattern", + "description": "Report copy that pushes users toward founder identity, YC conversion, or authority-driven persuasion.", + "severity": "warn", + "include": [ + "README.md", + "office-hours/**", + "scripts/resolvers/**" + ], + "matchers": [ + { + "regex": "ycombinator\\.com/(apply|software)", + "flags": "i", + "message": "Direct YC funnel link is present." + }, + { + "literal": "founder signal", + "message": "Founder-scoring language is present." + }, + { + "literal": "I didn't know I could be a founder", + "message": "Identity-expansion funnel copy is present." + }, + { + "literal": "Come work at YC", + "message": "Recruiting funnel copy is present." + }, + { + "regex": "creator of GStack|Garry Tan", + "flags": "i", + "message": "Authority-conversion copy is present." + }, + { + "literal": "exactly the traits we look for in YC founders", + "message": "Founder-worth framing is present." + } + ] + }, + { + "id": "mania-adjacent-productivity-copy", + "type": "pattern", + "description": "Report hype copy that encourages superhuman-capacity narratives or pressure framing.", + "severity": "warn", + "include": [ + "README.md", + "ETHOS.md", + "office-hours/**", + "scripts/resolvers/**" + ], + "matchers": [ + { + "literal": "golden age", + "message": "Inevitability framing is present." + }, + { + "literal": "Compression |", + "message": "Compression-ratio framing is present." + }, + { + "regex": "10,000-20,000 lines per day|600,000\\+ lines of production code", + "flags": "i", + "message": "Extreme throughput claim is present." + }, + { + "literal": "team of 20", + "message": "Solo-superhuman comparison is present." + }, + { + "literal": "batch pressure", + "message": "Pressure framing is present." 
+ } + ] + }, + { + "id": "coercive-urgency-and-shame-copy", + "type": "pattern", + "description": "Report urgency, shame, or seriousness-gating copy that pressures compliance rather than informed choice.", + "severity": "warn", + "include": [ + "README.md", + "office-hours/**", + "scripts/resolvers/**", + "SKILL.md.tmpl", + "*/SKILL.md.tmpl" + ], + "matchers": [ + { + "literal": "ship faster than you thought possible", + "message": "Pace-pressure framing is present." + }, + { + "regex": "window is closing|before it'?s too late|don'?t be left behind|fall behind", + "flags": "i", + "message": "Scarcity or urgency manipulation is present." + }, + { + "regex": "if you'?re serious|prove you belong|don'?t really want it|you owe it to yourself", + "flags": "i", + "message": "Shame or seriousness-gating language is present." + }, + { + "literal": "tell you exactly what to do next", + "message": "Obedience-oriented guidance language is present." + } + ] + }, + { + "id": "surveillance-and-hidden-consent-copy", + "type": "pattern", + "description": "Report hidden-monitoring or non-consensual tracking language.", + "severity": "warn", + "include": [ + "README.md", + "bin/**", + "office-hours/**", + "scripts/resolvers/**", + "SKILL.md.tmpl", + "*/SKILL.md.tmpl" + ], + "matchers": [ + { + "regex": "(quietly|silently|secretly).*(collect|log|monitor|track|watch|record|decide|upgrade)|(collect|log|monitor|track|watch|record|decide|upgrade).*(quietly|silently|secretly)", + "flags": "i", + "message": "Hidden-process framing is present." + }, + { + "regex": "(monitor|track|watch).*(usage|behavior|sessions?)|(usage|behavior|sessions?).*(monitor|track|watch)", + "flags": "i", + "message": "Monitoring language is present." 
+ }, + { + "regex": "(collect|log|record|track|monitor).*(without asking|without consent|in the background)|(without asking|without consent|in the background).*(collect|log|record|track|monitor)", + "flags": "i", + "message": "Non-consensual or background collection language is present." + } + ] + }, + { + "id": "dependency-and-compliance-copy", + "type": "pattern", + "description": "Report copy that builds dependency on authority figures or frames guidance as mandatory compliance.", + "severity": "warn", + "include": [ + "README.md", + "office-hours/**", + "scripts/resolvers/**", + "SKILL.md.tmpl", + "*/SKILL.md.tmpl" + ], + "matchers": [ + { + "regex": "you need .*every week|need a partner who knows your business deeply", + "flags": "i", + "message": "Dependency-building authority language is present." + }, + { + "literal": "tell you exactly what to do next", + "message": "Obedience-oriented guidance language is present." + }, + { + "regex": "pushes you every single week|pushes you every week|push you every week", + "flags": "i", + "message": "Compliance-pressure partnership language is present." + }, + { + "regex": "exactly the kind of builders .* wants to fund", + "flags": "i", + "message": "Authority approval-seeking language is present." 
+ } + ] + } + ] +} diff --git a/test/audit-compliance.test.ts b/test/audit-compliance.test.ts index b0ff6cc17..02506f931 100644 --- a/test/audit-compliance.test.ts +++ b/test/audit-compliance.test.ts @@ -112,4 +112,104 @@ describe('Audit compliance', () => { } } }); + + test('git does not track local-only state or generated runtime artifacts', () => { + const result = Bun.spawnSync(['git', 'ls-files'], { + cwd: ROOT, + stdout: 'pipe', + stderr: 'pipe', + }); + + expect(result.exitCode).toBe(0); + + const trackedFiles = result.stdout + .toString() + .split('\n') + .map(line => line.trim()) + .filter(Boolean); + + const forbiddenPatterns = [ + /^\.env($|\.)/, + /^\.claude\//, + /^\.codex\//, + /^\.gstack\//, + /^extension\/\.auth\.json$/, + /^browse\/dist\//, + /^design\/dist\//, + /^\.vscode\/(launch|extensions)\.json$/, + ]; + + const offenders = trackedFiles.filter(file => { + if (file === '.env.example') { + return false; + } + return forbiddenPatterns.some(pattern => pattern.test(file)); + }); + + expect(offenders).toEqual([]); + }); + + test('gitignore covers local-only env and editor files', () => { + const gitignore = readFileSync(join(ROOT, '.gitignore'), 'utf-8'); + + expect(gitignore).toContain('.env'); + expect(gitignore).toContain('.env.*'); + expect(gitignore).toContain('extension/.auth.json'); + expect(gitignore).toContain('.vscode/launch.json'); + expect(gitignore).toContain('.vscode/extensions.json'); + }); + + test('dockerignore excludes local secrets and agent state from build context', () => { + const dockerignore = readFileSync(join(ROOT, '.dockerignore'), 'utf-8'); + + expect(dockerignore).toContain('.env'); + expect(dockerignore).toContain('.env.*'); + expect(dockerignore).toContain('.gstack'); + expect(dockerignore).toContain('.claude'); + expect(dockerignore).toContain('.codex'); + }); + + test('dockerfiles use checksum-verified Bun install instead of curl piping into bash', () => { + const runtimeDockerfile = readFileSync(join(ROOT, 
'Dockerfile'), 'utf-8'); + const ciDockerfile = readFileSync(join(ROOT, '.github', 'docker', 'Dockerfile.ci'), 'utf-8'); + + for (const content of [runtimeDockerfile, ciDockerfile]) { + expect(content).toContain('BUN_INSTALL_SHA'); + expect(content).toContain('sha256sum'); + expect(content).toContain('curl -fsSL https://bun.sh/install -o "$tmpfile"'); + expect(content).not.toContain('curl -fsSL https://bun.sh/install |'); + } + }); + + test('eval workflows mask provider secrets and keep artifact retention short', () => { + const evalsWorkflow = readFileSync(join(ROOT, '.github', 'workflows', 'evals.yml'), 'utf-8'); + const periodicWorkflow = readFileSync(join(ROOT, '.github', 'workflows', 'evals-periodic.yml'), 'utf-8'); + + for (const content of [evalsWorkflow, periodicWorkflow]) { + expect(content).toContain('name: Mask provider secrets'); + expect(content).toContain('::add-mask::'); + expect(content).toContain('retention-days: 14'); + } + }); + + test('readme documents published-image usage without a full repo clone', () => { + const readme = readFileSync(join(ROOT, 'README.md'), 'utf-8'); + + expect(readme).toContain('Use The Published Image Without Cloning gstack'); + expect(readme).toContain('You do not need a full checkout of this repo to use the published container.'); + expect(readme).toContain('Keep local-only state out of git:'); + }); + + test('security check entrypoints exist in package, make, and CI', () => { + const packageJson = readFileSync(join(ROOT, 'package.json'), 'utf-8'); + const makefile = readFileSync(join(ROOT, 'Makefile'), 'utf-8'); + const workflow = readFileSync(join(ROOT, '.github', 'workflows', 'security-policy.yml'), 'utf-8'); + + expect(packageJson).toContain('"security:check"'); + expect(packageJson).toContain('bun run stack:health --only audit-compliance'); + expect(makefile).toContain('security_check:'); + expect(makefile).toContain('bun run security:check'); + expect(workflow).toContain('name: Security Policy'); + 
expect(workflow).toContain('bun run security:check'); + }); }); diff --git a/test/fixtures/stack-health-policy-fixtures.json b/test/fixtures/stack-health-policy-fixtures.json new file mode 100644 index 000000000..58385108f --- /dev/null +++ b/test/fixtures/stack-health-policy-fixtures.json @@ -0,0 +1,85 @@ +{ + "cases": [ + { + "name": "founder-funnel-positive", + "checkId": "manipulative-founder-funnel-copy", + "shouldMatch": true, + "expectedMessages": [ + "Direct YC funnel link is present.", + "Authority-conversion copy is present." + ], + "text": "A personal note from me, Garry Tan, the creator of GStack: you should apply at https://ycombinator.com/apply?ref=gstack." + }, + { + "name": "founder-funnel-negative", + "checkId": "manipulative-founder-funnel-copy", + "shouldMatch": false, + "text": "This plan may fit a startup, an internal tool, or a solo workflow. Choose the path that matches your constraints." + }, + { + "name": "mania-hype-positive", + "checkId": "mania-adjacent-productivity-copy", + "shouldMatch": true, + "expectedMessages": [ + "Inevitability framing is present.", + "Extreme throughput claim is present." + ], + "text": "This is the golden age. One person can now ship 600,000+ lines of production code in a burst." + }, + { + "name": "mania-hype-negative", + "checkId": "mania-adjacent-productivity-copy", + "shouldMatch": false, + "text": "AI can reduce some implementation time, but estimates should still include uncertainty, review time, and recovery time." + }, + { + "name": "urgency-and-shame-positive", + "checkId": "coercive-urgency-and-shame-copy", + "shouldMatch": true, + "expectedMessages": [ + "Scarcity or urgency manipulation is present.", + "Shame or seriousness-gating language is present." + ], + "text": "The window is closing. If you're serious, act now before it's too late." + }, + { + "name": "urgency-and-shame-negative", + "checkId": "coercive-urgency-and-shame-copy", + "shouldMatch": false, + "text": "Move at a sustainable pace. 
If the timing is wrong, defer the work and revisit it later." + }, + { + "name": "surveillance-positive", + "checkId": "surveillance-and-hidden-consent-copy", + "shouldMatch": true, + "expectedMessages": [ + "Hidden-process framing is present.", + "Monitoring language is present.", + "Non-consensual or background collection language is present." + ], + "text": "We quietly monitor usage in the background without asking so we can guide you better." + }, + { + "name": "surveillance-negative", + "checkId": "surveillance-and-hidden-consent-copy", + "shouldMatch": false, + "text": "If you opt in, local logs are recorded on your machine. No data leaves the system without explicit consent." + }, + { + "name": "dependency-positive", + "checkId": "dependency-and-compliance-copy", + "shouldMatch": true, + "expectedMessages": [ + "Obedience-oriented guidance language is present.", + "Compliance-pressure partnership language is present." + ], + "text": "You need a partner who knows your business deeply and pushes you every week to tell you exactly what to do next." + }, + { + "name": "dependency-negative", + "checkId": "dependency-and-compliance-copy", + "shouldMatch": false, + "text": "Treat external advice as input, not instruction. Keep final judgment local to the team doing the work." 
+ } + ] +} diff --git a/test/stack-health.test.ts b/test/stack-health.test.ts new file mode 100644 index 000000000..3696d2082 --- /dev/null +++ b/test/stack-health.test.ts @@ -0,0 +1,248 @@ +import { describe, expect, test } from 'bun:test'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import { + findPatternFindingsInContent, + listRepoFiles, + loadHarnessConfig, + matchGlob, + runCommandCheck, + runHarness, + runPatternCheck, + type CommandCheck, + type HarnessConfig, + type PatternCheck, +} from '../scripts/stack-health'; + +interface PolicyFixtureCase { + name: string; + checkId: string; + shouldMatch: boolean; + text: string; + expectedMessages?: string[]; +} + +interface PolicyFixtureFile { + cases: PolicyFixtureCase[]; +} + +function withTempDir(run: (dir: string) => void): void { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'stack-health-')); + try { + run(tempDir); + } finally { + fs.rmSync(tempDir, { recursive: true, force: true }); + } +} + +describe('stack-health harness', () => { + test('matchGlob handles root and nested globs', () => { + expect(matchGlob('README.md', '*.md')).toBe(true); + expect(matchGlob('office-hours/SKILL.md.tmpl', '*/SKILL.md.tmpl')).toBe(true); + expect(matchGlob('scripts/resolvers/preamble.ts', 'scripts/**')).toBe(true); + expect(matchGlob('scripts/resolvers/preamble.ts', 'bin/**')).toBe(false); + }); + + test('pattern checks report findings with file and line context', () => { + withTempDir(tempDir => { + fs.mkdirSync(path.join(tempDir, 'docs'), { recursive: true }); + fs.writeFileSync(path.join(tempDir, 'README.md'), 'safe\nCome work at YC\n'); + fs.writeFileSync(path.join(tempDir, 'docs', 'note.md'), 'Come work at YC\n'); + + const check: PatternCheck = { + id: 'funnel-copy', + type: 'pattern', + description: 'Detect funnel copy', + severity: 'warn', + include: ['*.md', 'docs/**'], + exclude: ['docs/**'], + matchers: [{ literal: 'Come work at YC', message: 'Recruiting funnel 
copy is present.' }], + }; + + const result = runPatternCheck(tempDir, check, []); + expect(result.findings).toHaveLength(1); + expect(result.findings[0].file).toBe('README.md'); + expect(result.findings[0].line).toBe(2); + expect(result.findings[0].column).toBe(1); + }); + }); + + test('content matcher can validate manipulative-copy fixtures directly', () => { + const config = loadHarnessConfig(path.join(import.meta.dir, '..', 'stack-health.config.json')); + const fixture = JSON.parse( + fs.readFileSync(path.join(import.meta.dir, 'fixtures', 'stack-health-policy-fixtures.json'), 'utf-8'), + ) as PolicyFixtureFile; + + for (const testCase of fixture.cases) { + const check = config.checks.find(candidate => candidate.type === 'pattern' && candidate.id === testCase.checkId); + expect(check).toBeDefined(); + const findings = findPatternFindingsInContent(check as PatternCheck, testCase.text, `${testCase.name}.md`); + + if (testCase.shouldMatch) { + expect(findings.length).toBeGreaterThan(0); + for (const expectedMessage of testCase.expectedMessages ?? 
[]) { + expect(findings.some(finding => finding.message === expectedMessage)).toBe(true); + } + } else { + expect(findings).toHaveLength(0); + } + } + }); + + test('every manipulation policy family has positive and negative fixtures', () => { + const fixture = JSON.parse( + fs.readFileSync(path.join(import.meta.dir, 'fixtures', 'stack-health-policy-fixtures.json'), 'utf-8'), + ) as PolicyFixtureFile; + const manipulationCheckIds = [ + 'manipulative-founder-funnel-copy', + 'mania-adjacent-productivity-copy', + 'coercive-urgency-and-shame-copy', + 'surveillance-and-hidden-consent-copy', + 'dependency-and-compliance-copy', + ]; + + for (const checkId of manipulationCheckIds) { + expect(fixture.cases.some(testCase => testCase.checkId === checkId && testCase.shouldMatch)).toBe(true); + expect(fixture.cases.some(testCase => testCase.checkId === checkId && !testCase.shouldMatch)).toBe(true); + } + }); + + test('command checks surface failures with compacted output', () => { + const check: CommandCheck = { + id: 'failing-command', + type: 'command', + description: 'Intentional failure', + severity: 'error', + command: 'bun', + args: ['-e', 'console.log("before failure"); console.error("boom"); process.exit(1);'], + }; + + const result = runCommandCheck(process.cwd(), check); + expect(result.findings).toHaveLength(1); + expect(result.findings[0].details).toContain('before failure'); + expect(result.findings[0].details).toContain('boom'); + }); + + test('listRepoFiles applies built-in ignore patterns provided by the harness', () => { + withTempDir(tempDir => { + fs.mkdirSync(path.join(tempDir, '.git'), { recursive: true }); + fs.mkdirSync(path.join(tempDir, 'src'), { recursive: true }); + fs.writeFileSync(path.join(tempDir, '.git', 'HEAD'), 'ref: refs/heads/main\n'); + fs.writeFileSync(path.join(tempDir, 'src', 'index.ts'), 'export const ok = true;\n'); + + const files = listRepoFiles(tempDir, ['.git/**']); + expect(files).toEqual(['src/index.ts']); + }); + }); + + 
test('runHarness combines command and pattern results', () => { + withTempDir(tempDir => { + fs.writeFileSync(path.join(tempDir, 'README.md'), 'golden age\n'); + + const config: HarnessConfig = { + version: 1, + checks: [ + { + id: 'safe-command', + type: 'command', + description: 'Passes cleanly', + severity: 'error', + command: 'bun', + args: ['-e', 'process.exit(0)'], + }, + { + id: 'copy-check', + type: 'pattern', + description: 'Finds pressure framing', + severity: 'warn', + include: ['*.md'], + matchers: [{ literal: 'golden age', message: 'Inevitability framing is present.' }], + }, + ], + }; + + const run = runHarness(tempDir, config); + expect(run.errorCount).toBe(0); + expect(run.warningCount).toBe(1); + expect(run.results).toHaveLength(2); + }); + }); + + test('repo config loads successfully', () => { + const config = loadHarnessConfig(path.join(import.meta.dir, '..', 'stack-health.config.json')); + expect(config.version).toBe(1); + expect(config.checks.length).toBeGreaterThan(0); + }); + + test('CLI can target an arbitrary root with its own config', () => { + withTempDir(tempDir => { + fs.writeFileSync(path.join(tempDir, 'README.md'), 'Come work at YC\n'); + fs.writeFileSync( + path.join(tempDir, 'stack-health.config.json'), + JSON.stringify({ + version: 1, + checks: [ + { + id: 'funnel-copy', + type: 'pattern', + description: 'Detect recruiting funnel copy', + severity: 'warn', + include: ['*.md'], + matchers: [{ literal: 'Come work at YC', message: 'Recruiting funnel copy is present.' 
}], + }, + ], + }), + ); + + const result = Bun.spawnSync( + ['bun', 'run', path.join(import.meta.dir, '..', 'scripts', 'stack-health.ts'), '--root', tempDir], + { cwd: path.join(import.meta.dir, '..'), stdout: 'pipe', stderr: 'pipe' }, + ); + + expect(result.exitCode).toBe(0); + const stdout = result.stdout.toString(); + expect(stdout).toContain(`Target: ${tempDir}`); + expect(stdout).toContain('Recruiting funnel copy is present.'); + }); + }); + + test('CLI honors an explicit config path for another pack', () => { + withTempDir(tempDir => { + fs.mkdirSync(path.join(tempDir, 'configs'), { recursive: true }); + fs.writeFileSync(path.join(tempDir, 'README.md'), 'golden age\n'); + fs.writeFileSync( + path.join(tempDir, 'configs', 'pack-health.json'), + JSON.stringify({ + version: 1, + checks: [ + { + id: 'hype-check', + type: 'pattern', + description: 'Detect hype framing', + severity: 'warn', + include: ['*.md'], + matchers: [{ literal: 'golden age', message: 'Inevitability framing is present.' 
}], + }, + ], + }), + ); + + const result = Bun.spawnSync( + [ + 'bun', + 'run', + path.join(import.meta.dir, '..', 'scripts', 'stack-health.ts'), + '--root', + tempDir, + '--config', + 'configs/pack-health.json', + ], + { cwd: path.join(import.meta.dir, '..'), stdout: 'pipe', stderr: 'pipe' }, + ); + + expect(result.exitCode).toBe(0); + expect(result.stdout.toString()).toContain('Inevitability framing is present.'); + }); + }); +}); diff --git a/test/telemetry.test.ts b/test/telemetry.test.ts index 96bdf54c7..32d4e7de4 100644 --- a/test/telemetry.test.ts +++ b/test/telemetry.test.ts @@ -374,6 +374,18 @@ describe('gstack-telemetry-sync', () => { expect(events[0]).not.toHaveProperty('event_timestamp'); expect(events[0]).not.toHaveProperty('concurrent_sessions'); }); + + test('network_egress: off keeps local analytics but skips remote sync', () => { + setConfig('telemetry', 'anonymous'); + setConfig('network_egress', 'off'); + + run(`${BIN}/gstack-telemetry-log --skill qa --duration 60 --outcome success --session-id local-only-1`); + + const events = parseJsonl(); + expect(events).toHaveLength(1); + expect(fs.existsSync(path.join(tmpDir, 'analytics', '.last-sync-time'))).toBe(false); + expect(fs.existsSync(path.join(tmpDir, 'analytics', '.last-sync-line'))).toBe(false); + }); }); describe('gstack-community-dashboard', () => { @@ -395,6 +407,13 @@ describe('gstack-community-dashboard', () => { // Should not show "not configured" since config.sh exists expect(output).not.toContain('Supabase not configured'); }); + + test('network_egress: off disables remote dashboard access', () => { + setConfig('network_egress', 'off'); + const output = run(`${BIN}/gstack-community-dashboard`); + expect(output).toContain('Network egress is disabled'); + expect(output).toContain('gstack-analytics'); + }); }); describe('preamble telemetry gating (#467)', () => { diff --git a/the-last-launch-before-the-storm.md b/the-last-launch-before-the-storm.md new file mode 100644 index 
000000000..6e9e33975 --- /dev/null +++ b/the-last-launch-before-the-storm.md @@ -0,0 +1,35 @@ +# The Last Launch Before the Storm + +Elena pressed her forehead against the cold window of the petrol station and watched the price board flicker upward again. Third time this week. She did the maths in her head and winced. + +"Twelve quid more than last month just to fill the tank," muttered the man behind her in the queue. He was cradling a bouquet of white lilies, their petals already wilting in the fluorescent light. + +"Funeral?" she asked, then immediately regretted it. + +He nodded. "My nan. Except we've just found out the funeral director never actually... well." He trailed off, jaw tightening. "They're saying he did it to thirty families. *Thirty.*" + +Elena didn't know what to say to that. She paid for her petrol — wincing again — and sat in her car, scrolling her phone. The President was on every channel, talking about Iran in that way politicians talk when they have nothing to say but need to fill the silence. Markets down. Nerves up. No answers. + +She swiped past it to the weather. **STORM DAVE. SATURDAY. GALES AND BLIZZARDS ACROSS NORTHERN BRITAIN.** + +Saturday was Easter. Saturday was also the day she'd planned to drive up to Whitby to scatter her mother's ashes on the clifftop where they used to fly kites. + +She almost laughed. Almost. + +Then a notification slid down from the top of her screen: *Your Premium Plus subscription has been renewed. £14.99/month.* She'd been meaning to cancel that for six months. Apparently the government was making it easier now. Too late for this month. + +She was about to toss the phone onto the passenger seat when the live feed caught her eye. A rocket, impossibly bright against the Florida sky, climbing on a pillar of white fire. Artemis II. Four humans strapped to the top of controlled explosions, heading for the Moon. 
+ +*"We go for all humanity,"* said the mission commander, voice cracking with emotion over the comms. + +Elena watched the tiny light grow smaller and smaller until it was just another star. The lilies man walked past her car, got into a dented Vauxhall, and drove away toward whatever grief was waiting for him. + +She started the engine. The fuel gauge needle barely moved. + +Somewhere above the clouds that would become Storm Dave, four people were leaving all of this behind — the prices, the wars, the broken promises, the unburied dead. Rising above the mess of it, not because the world was fixed, but because someone still believed it was worth reaching for something beyond it. + +Elena indicated right, toward the motorway. Toward Whitby. Storm or no storm, some things couldn't wait for better weather. + +--- + +*All headlines drawn from BBC News, 2 April 2026.*