diff --git a/skills/.curated/dockerize-app/LICENSE.txt b/skills/.curated/dockerize-app/LICENSE.txt new file mode 100644 index 00000000..13e25df8 --- /dev/null +++ b/skills/.curated/dockerize-app/LICENSE.txt @@ -0,0 +1,201 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, 
in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf of + any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/skills/.curated/dockerize-app/SKILL.md b/skills/.curated/dockerize-app/SKILL.md new file mode 100644 index 00000000..dee5ec5a --- /dev/null +++ b/skills/.curated/dockerize-app/SKILL.md @@ -0,0 +1,72 @@ +--- +name: dockerize-app +description: Inspect a repository and generate Docker artifacts for common app stacks. Use when a user asks to dockerize a project, containerize an app, create Dockerfile/.dockerignore, produce docker-compose.yml for local development, or detect frameworks like Next.js, Vite, FastAPI, Django, Express, Nest, Go, or Rust and scaffold container configs. +--- + +# Dockerize App + +Generate Docker setup files by inspecting repository signals. + +## Quick start + +1. 
Run the bundled script from the target repository root: + +```bash +python3 "$CODEX_HOME/skills/dockerize-app/scripts/dockerize_app.py" --repo . +``` + +2. To also create optional files when missing: + +```bash +python3 "$CODEX_HOME/skills/dockerize-app/scripts/dockerize_app.py" \ + --repo . \ + --with-env-example \ + --with-compose-override +``` + +3. Review generated files and adjust ports, commands, and env values for project specifics. + +## What the script detects + +- Node signals: + - `package.json` + - `pnpm-lock.yaml`, `package-lock.json`, `yarn.lock` + - Framework markers in dependencies: `next`, `vite`, `express`, `@nestjs/core` +- Python signals: + - `requirements.txt`, `pyproject.toml`, `poetry.lock` + - Framework markers: `fastapi`, `django` +- Go signal: + - `go.mod` +- Rust signal: + - `Cargo.toml` + +## Generated files + +- `Dockerfile` +- `.dockerignore` +- `docker-compose.yml` (local dev oriented) +- `.env.example` (optional, only if missing and `--with-env-example` is set) +- `compose.override.yml` (optional, only if missing and `--with-compose-override` is set) + +## Behavior + +- Prefer not to overwrite existing files unless `--force` is provided. +- Print detected stack/framework/package manager and written/skipped files. +- Use deterministic templates and conservative defaults. 
+ +## References + +- For deeper behavior details (stack priority, framework mapping, port defaults, and template output expectations), read: + - `references/detection-and-templates.md` + +## Command reference + +```bash +python3 scripts/dockerize_app.py --repo <path> [--force] [--dry-run] [--with-env-example] [--with-compose-override] +``` + +- `--repo`: target repository path (default `.`) +- `--force`: overwrite existing generated files +- `--dry-run`: print outputs without writing +- `--with-env-example`: create `.env.example` if missing +- `--with-compose-override`: create `compose.override.yml` if missing diff --git a/skills/.curated/dockerize-app/agents/openai.yaml b/skills/.curated/dockerize-app/agents/openai.yaml new file mode 100644 index 00000000..fe2515da --- /dev/null +++ b/skills/.curated/dockerize-app/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Dockerize App" + short_description: "Detect stack and scaffold Docker setup" + default_prompt: "Inspect this repository and generate Dockerfile, .dockerignore, and docker-compose.yml for local development. Add .env.example and compose.override.yml if useful." diff --git a/skills/.curated/dockerize-app/assets/docker.png b/skills/.curated/dockerize-app/assets/docker.png new file mode 100644 index 00000000..1b11bb15 Binary files /dev/null and b/skills/.curated/dockerize-app/assets/docker.png differ diff --git a/skills/.curated/dockerize-app/references/detection-and-templates.md b/skills/.curated/dockerize-app/references/detection-and-templates.md new file mode 100644 index 00000000..61d6b827 --- /dev/null +++ b/skills/.curated/dockerize-app/references/detection-and-templates.md @@ -0,0 +1,71 @@ +# Detection And Templates + +This reference describes how `scripts/dockerize_app.py` decides what to generate. + +## Stack detection order + +When multiple signals exist, the script chooses the first stack from this priority: + +1. `node` +2. `python` +3. `go` +4. 
`rust` + +Example: If a repo has both `package.json` and `go.mod`, Node wins unless the script is changed. + +## Stack signals + +- Node: + - `package.json` is required for Node detection. + - Package manager detection order: + - `packageManager` field in `package.json` + - `pnpm-lock.yaml` + - `yarn.lock` + - default `npm` +- Python: + - `requirements.txt` or `pyproject.toml` + - install mode: + - Poetry if `poetry.lock` or `[tool.poetry]` + - pyproject install if only `pyproject.toml` exists + - requirements-based otherwise +- Go: + - `go.mod` +- Rust: + - `Cargo.toml` + +## Framework mapping + +- Node: + - `next` -> `nextjs`, default port `3000` + - `vite` -> `vite`, default port `5173` + - `@nestjs/core` or `@nestjs/common` -> `nest`, default port `3000` + - `express` -> `express`, default port `3000` +- Python: + - `fastapi` in requirements/pyproject -> `fastapi`, default port `8000` + - `django` in requirements/pyproject -> `django`, default port `8000` + +## Generated files and intent + +- `Dockerfile`: + - Production-leaning default image with stack-specific install/build/start. +- `.dockerignore`: + - Common exclusions plus stack-specific ignores. +- `docker-compose.yml`: + - Local-dev oriented, maps `${PORT:-<detected default port>}` and runs the detected dev command. +- `.env.example` (optional flag): + - Basic starter vars. +- `compose.override.yml` (optional flag): + - Dev-only volume mounts. + +## Safe write behavior + +- Existing files are not overwritten unless `--force` is used. +- `--dry-run` previews content without writing. + +## Practical workflow + +1. Run dry-run first: + - `python3 scripts/dockerize_app.py --repo . --dry-run --with-env-example --with-compose-override` +2. Review commands/ports. +3. Run again without dry-run. +4. If needed, rerun with `--force` after manual review. 
diff --git a/skills/.curated/dockerize-app/scripts/dockerize_app.py b/skills/.curated/dockerize-app/scripts/dockerize_app.py new file mode 100644 index 00000000..411e472c --- /dev/null +++ b/skills/.curated/dockerize-app/scripts/dockerize_app.py @@ -0,0 +1,593 @@ +#!/usr/bin/env python3 +"""Detect project stack and generate Docker artifacts.""" + +from __future__ import annotations + +import argparse +import json +import re +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + + +STACK_PRIORITY = ["node", "python", "go", "rust"] + + +@dataclass +class Detection: + stack: str + framework: str | None + package_manager: str | None + port: int + start_command: str + dev_command: str + notes: list[str] = field(default_factory=list) + scripts: dict[str, str] = field(default_factory=dict) + details: dict[str, Any] = field(default_factory=dict) + + +def read_text_if_exists(path: Path) -> str: + if not path.exists(): + return "" + return path.read_text(encoding="utf-8", errors="ignore") + + +def read_json_if_exists(path: Path) -> dict[str, Any]: + if not path.exists(): + return {} + raw = path.read_text(encoding="utf-8", errors="ignore").lstrip("\ufeff") + try: + data = json.loads(raw) + except json.JSONDecodeError: + return {} + return data if isinstance(data, dict) else {} + + +def normalize_package_manager(value: str | None) -> str | None: + if not value: + return None + lowered = value.lower().strip() + if "@" in lowered: + lowered = lowered.split("@", 1)[0] + if lowered in {"pnpm", "npm", "yarn"}: + return lowered + return None + + +def choose_package_manager(repo: Path, package_json: dict[str, Any]) -> str: + by_field = normalize_package_manager(str(package_json.get("packageManager", ""))) + if by_field: + return by_field + if (repo / "pnpm-lock.yaml").exists(): + return "pnpm" + if (repo / "yarn.lock").exists(): + return "yarn" + return "npm" + + +def pm_run_script(pm: str, script: str, extra_args: list[str] | None = None) -> 
str: + base = f"{pm} run {script}" + if not extra_args: + return base + suffix = " ".join(extra_args) + return f"{base} -- {suffix}" + + +def detect_node_framework(package_json: dict[str, Any]) -> str | None: + deps: dict[str, Any] = {} + for key in ("dependencies", "devDependencies", "peerDependencies"): + value = package_json.get(key) + if isinstance(value, dict): + deps.update(value) + names = {str(name).lower() for name in deps.keys()} + if "next" in names: + return "nextjs" + if "vite" in names: + return "vite" + if "@nestjs/core" in names or "@nestjs/common" in names: + return "nest" + if "express" in names: + return "express" + return None + + +def detect_python_framework(repo: Path) -> str | None: + requirements = read_text_if_exists(repo / "requirements.txt").lower() + pyproject = read_text_if_exists(repo / "pyproject.toml").lower() + combined = "\n".join([requirements, pyproject]) + if "fastapi" in combined: + return "fastapi" + if "django" in combined: + return "django" + return None + + +def detect_stack_candidates(repo: Path) -> list[str]: + candidates: list[str] = [] + if (repo / "package.json").exists(): + candidates.append("node") + if (repo / "requirements.txt").exists() or (repo / "pyproject.toml").exists(): + candidates.append("python") + if (repo / "go.mod").exists(): + candidates.append("go") + if (repo / "Cargo.toml").exists(): + candidates.append("rust") + return candidates + + +def detect_fastapi_module(repo: Path) -> str: + candidates = [ + ("app/main.py", "app.main"), + ("main.py", "main"), + ("app.py", "app"), + ("src/main.py", "src.main"), + ] + for rel, module in candidates: + if (repo / rel).exists(): + content = read_text_if_exists(repo / rel).lower() + if "fastapi(" in content or "from fastapi import" in content: + return module + return "main" + + +def detect_django_manage_path(repo: Path) -> str: + root_manage = repo / "manage.py" + if root_manage.exists(): + return "manage.py" + for path in repo.glob("*/manage.py"): + if 
path.is_file(): + return str(path.relative_to(repo)).replace("\\", "/") + return "manage.py" + + +def detect_python_entrypoint(repo: Path) -> str: + for rel in ("main.py", "app.py", "src/main.py"): + if (repo / rel).exists(): + return rel + return "main.py" + + +def detect_go_target(repo: Path) -> str: + if (repo / "main.go").exists(): + return "." + cmd_dir = repo / "cmd" + if cmd_dir.is_dir(): + mains = [p for p in cmd_dir.glob("*/main.go") if p.is_file()] + if len(mains) == 1: + target = mains[0].parent.relative_to(repo).as_posix() + return f"./{target}" + return "." + + +def parse_rust_bin_name(repo: Path) -> str: + cargo_toml = read_text_if_exists(repo / "Cargo.toml") + match = re.search(r'(?m)^\s*name\s*=\s*"([^"]+)"\s*$', cargo_toml) + if match: + return match.group(1).strip().replace("-", "_") + return "app" + + +def detect_project(repo: Path) -> Detection: + package_json = read_json_if_exists(repo / "package.json") + candidates = detect_stack_candidates(repo) + if not candidates: + raise RuntimeError( + "Could not detect project stack. Expected one of: package.json, requirements.txt, " + "pyproject.toml, go.mod, Cargo.toml." + ) + + notes: list[str] = [] + if len(candidates) > 1: + notes.append( + "Multiple stack signals found: " + + ", ".join(candidates) + + ". Using priority: " + + " > ".join(STACK_PRIORITY) + + "." 
+ ) + stack = sorted(candidates, key=lambda item: STACK_PRIORITY.index(item))[0] + + if stack == "node": + pm = choose_package_manager(repo, package_json) + framework = detect_node_framework(package_json) + scripts = package_json.get("scripts", {}) + scripts = scripts if isinstance(scripts, dict) else {} + + port = 5173 if framework == "vite" else 3000 + + if framework == "nextjs": + dev_command = ( + pm_run_script(pm, "dev", ["--hostname", "0.0.0.0", "--port", str(port)]) + if "dev" in scripts + else f"next dev --hostname 0.0.0.0 --port {port}" + ) + start_command = ( + pm_run_script(pm, "start") + if "start" in scripts + else f"next start --hostname 0.0.0.0 --port {port}" + ) + elif framework == "vite": + dev_command = ( + pm_run_script(pm, "dev", ["--host", "0.0.0.0", "--port", str(port)]) + if "dev" in scripts + else f"vite --host 0.0.0.0 --port {port}" + ) + if "preview" in scripts: + start_command = pm_run_script( + pm, "preview", ["--host", "0.0.0.0", "--port", str(port)] + ) + elif "start" in scripts: + start_command = pm_run_script(pm, "start") + else: + start_command = f"vite preview --host 0.0.0.0 --port {port}" + elif framework == "nest": + dev_command = ( + pm_run_script(pm, "start:dev") + if "start:dev" in scripts + else pm_run_script(pm, "dev") + if "dev" in scripts + else "nest start --watch" + ) + start_command = ( + pm_run_script(pm, "start:prod") + if "start:prod" in scripts + else pm_run_script(pm, "start") + if "start" in scripts + else "node dist/main.js" + ) + else: + dev_script = "dev" if "dev" in scripts else "start:dev" if "start:dev" in scripts else "start" + dev_command = ( + pm_run_script(pm, dev_script) + if dev_script in scripts + else "node --watch ." + ) + start_command = ( + pm_run_script(pm, "start") + if "start" in scripts + else "node server.js" + if framework == "express" + else "node ." 
+ ) + + return Detection( + stack=stack, + framework=framework, + package_manager=pm, + port=port, + start_command=start_command, + dev_command=dev_command, + notes=notes, + scripts={str(k): str(v) for k, v in scripts.items()}, + ) + + if stack == "python": + framework = detect_python_framework(repo) + port = 8000 + install_mode = "requirements" + if (repo / "poetry.lock").exists(): + install_mode = "poetry" + elif (repo / "pyproject.toml").exists() and "[tool.poetry]" in read_text_if_exists( + repo / "pyproject.toml" + ): + install_mode = "poetry" + elif (repo / "pyproject.toml").exists() and not (repo / "requirements.txt").exists(): + install_mode = "pyproject" + + if framework == "fastapi": + module = detect_fastapi_module(repo) + start_command = f"uvicorn {module}:app --host 0.0.0.0 --port {port}" + dev_command = f"uvicorn {module}:app --host 0.0.0.0 --port {port} --reload" + notes.append(f"Using FastAPI module '{module}'.") + elif framework == "django": + manage = detect_django_manage_path(repo) + start_command = f"python {manage} runserver 0.0.0.0:{port}" + dev_command = start_command + notes.append(f"Using Django manage path '{manage}'.") + else: + entry = detect_python_entrypoint(repo) + start_command = f"python {entry}" + dev_command = start_command + notes.append(f"Using Python entrypoint '{entry}'.") + + return Detection( + stack=stack, + framework=framework, + package_manager=None, + port=port, + start_command=start_command, + dev_command=dev_command, + notes=notes, + details={"python_install_mode": install_mode}, + ) + + if stack == "go": + target = detect_go_target(repo) + port = 8080 + return Detection( + stack=stack, + framework=None, + package_manager=None, + port=port, + start_command="/app/bin/app", + dev_command=f"go run {target}", + notes=notes + [f"Using Go build target '{target}'."], + details={"go_target": target}, + ) + + if stack == "rust": + bin_name = parse_rust_bin_name(repo) + port = 8080 + return Detection( + stack=stack, + 
framework=None, + package_manager=None, + port=port, + start_command=f"/usr/local/bin/{bin_name}", + dev_command="cargo run", + notes=notes + [f"Using Rust binary name '{bin_name}'."], + details={"rust_bin_name": bin_name}, + ) + + raise RuntimeError(f"Unsupported detected stack: {stack}") + + +def node_install_command(det: Detection, repo: Path) -> str: + pm = det.package_manager or "npm" + if pm == "pnpm": + return "corepack enable && pnpm install --frozen-lockfile" + if pm == "yarn": + return "corepack enable && yarn install --frozen-lockfile" + if (repo / "package-lock.json").exists(): + return "npm ci" + return "npm install" + + +def python_install_command(repo: Path, det: Detection) -> str: + mode = str(det.details.get("python_install_mode", "requirements")) + if mode == "poetry": + return ( + "pip install --no-cache-dir --upgrade pip poetry && " + "poetry config virtualenvs.create false && " + "poetry install --no-interaction --no-ansi" + ) + if mode == "pyproject": + return "pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir ." + return "pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt" + + +def generate_dockerfile(repo: Path, det: Detection) -> str: + if det.stack == "node": + install_cmd = node_install_command(det, repo) + build_cmd = ( + pm_run_script(det.package_manager or "npm", "build") + " || true" + if "build" in det.scripts + else "echo \"No build script detected; skipping build step\"" + ) + return f"""FROM node:20-alpine +WORKDIR /app + +COPY package.json ./ +COPY package-lock.json* pnpm-lock.yaml* yarn.lock* ./ +RUN {install_cmd} + +COPY . . 
+RUN {build_cmd} + +EXPOSE {det.port} +CMD ["sh", "-c", "{det.start_command}"] +""" + + if det.stack == "python": + install_cmd = python_install_command(repo, det) + return f"""FROM python:3.12-slim +WORKDIR /app + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +COPY requirements*.txt pyproject.toml poetry.lock* ./ +RUN {install_cmd} + +COPY . . + +EXPOSE {det.port} +CMD ["sh", "-c", "{det.start_command}"] +""" + + if det.stack == "go": + target = str(det.details.get("go_target", ".")) + return f"""FROM golang:1.22-alpine AS builder +WORKDIR /app + +COPY go.mod go.sum* ./ +RUN go mod download + +COPY . . +RUN CGO_ENABLED=0 GOOS=linux go build -o /app/bin/app {target} + +FROM alpine:3.20 +WORKDIR /app +COPY --from=builder /app/bin/app /app/bin/app + +EXPOSE {det.port} +CMD ["/app/bin/app"] +""" + + if det.stack == "rust": + bin_name = str(det.details.get("rust_bin_name", "app")) + return f"""FROM rust:1.77 AS builder +WORKDIR /app + +COPY Cargo.toml Cargo.lock* ./ +RUN mkdir src && echo "fn main() {{}}" > src/main.rs && cargo build --release || true +RUN rm -rf src + +COPY . . 
+RUN cargo build --release + +FROM debian:bookworm-slim +WORKDIR /app +COPY --from=builder /app/target/release/{bin_name} /usr/local/bin/{bin_name} + +EXPOSE {det.port} +CMD ["/usr/local/bin/{bin_name}"] +""" + + raise RuntimeError(f"Unsupported stack: {det.stack}") + + +def generate_dockerignore(det: Detection) -> str: + entries = [ + ".git", + ".git/*", + "__pycache__", + "*.py[cod]", + "*.log", + ".pytest_cache", + ".mypy_cache", + ".ruff_cache", + ".DS_Store", + ".idea", + ".vscode", + ".venv", + "venv", + ".env", + ".env.*", + "coverage", + "dist", + "build", + "target", + ] + if det.stack == "node": + entries.extend(["node_modules", ".next", ".turbo", ".pnpm-store"]) + if det.stack == "python": + entries.extend([".tox", "*.sqlite3"]) + return "\n".join(dict.fromkeys(entries)) + "\n" + + +def generate_compose(det: Detection) -> str: + return f"""services: + app: + build: + context: . + dockerfile: Dockerfile + env_file: + - .env + ports: + - "${{PORT:-{det.port}}}:{det.port}" + command: sh -c "{det.dev_command}" +""" + + +def generate_compose_override(det: Detection) -> str: + lines = [ + "services:", + " app:", + " volumes:", + " - ./:/app", + ] + if det.stack == "node": + lines.append(" - /app/node_modules") + return "\n".join(lines) + "\n" + + +def generate_env_example(det: Detection) -> str: + lines = [ + "# Copy this file to .env and adjust values for your machine.", + f"PORT={det.port}", + ] + if det.stack == "node": + lines.append("NODE_ENV=development") + elif det.stack == "python": + lines.append("PYTHONUNBUFFERED=1") + elif det.stack == "go": + lines.append("GO_ENV=development") + elif det.stack == "rust": + lines.append("RUST_LOG=info") + return "\n".join(lines) + "\n" + + +def write_or_print( + path: Path, + content: str, + *, + dry_run: bool, + force: bool, +) -> str: + if path.exists() and not force: + return "skipped (exists)" + if dry_run: + print(f"\n--- {path.name} ---") + print(content.rstrip()) + return "previewed" + 
path.write_text(content, encoding="utf-8") + return "written" + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Detect stack and generate Docker artifacts." + ) + parser.add_argument("--repo", default=".", help="Path to the repository root.") + parser.add_argument( + "--force", + action="store_true", + help="Overwrite existing generated files.", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Preview file content instead of writing files.", + ) + parser.add_argument( + "--with-env-example", + action="store_true", + help="Create .env.example when it is missing.", + ) + parser.add_argument( + "--with-compose-override", + action="store_true", + help="Create compose.override.yml when it is missing.", + ) + return parser.parse_args() + + +def main() -> int: + args = parse_args() + repo = Path(args.repo).expanduser().resolve() + if not repo.exists() or not repo.is_dir(): + raise RuntimeError(f"Repository path is invalid: {repo}") + + det = detect_project(repo) + + print(f"Detected stack: {det.stack}") + if det.framework: + print(f"Detected framework: {det.framework}") + if det.package_manager: + print(f"Detected package manager: {det.package_manager}") + for note in det.notes: + print(f"Note: {note}") + + outputs: list[tuple[Path, str]] = [ + (repo / "Dockerfile", generate_dockerfile(repo, det)), + (repo / ".dockerignore", generate_dockerignore(det)), + (repo / "docker-compose.yml", generate_compose(det)), + ] + + if args.with_env_example and not (repo / ".env.example").exists(): + outputs.append((repo / ".env.example", generate_env_example(det))) + if args.with_compose_override and not (repo / "compose.override.yml").exists(): + outputs.append((repo / "compose.override.yml", generate_compose_override(det))) + + print("\nGeneration plan:") + for path, content in outputs: + status = write_or_print(path, content, dry_run=args.dry_run, force=args.force) + print(f"- {path.name}: {status}") + + return 0 + 
+ +if __name__ == "__main__": + raise SystemExit(main())