diff --git a/.codex/rules/default.rules b/.codex/rules/default.rules new file mode 100644 index 00000000..c54aeae2 --- /dev/null +++ b/.codex/rules/default.rules @@ -0,0 +1,4 @@ +prefix_rule(pattern=["uv"], decision="allow") +prefix_rule(pattern=["gh"], decision="allow") +prefix_rule(pattern=["just"], decision="allow") +prefix_rule(pattern=["git"], decision="allow") diff --git a/.codex/skills/agentify-repo/SKILL.md b/.codex/skills/agentify-repo/SKILL.md new file mode 100644 index 00000000..72c3e571 --- /dev/null +++ b/.codex/skills/agentify-repo/SKILL.md @@ -0,0 +1,42 @@ +--- +name: agentify-repo +description: "Incrementally add harness and tooling to an existing repo so it converges toward zach-stack safely over multiple passes." +--- + +# Agentify Repo + +Use this skill when asked to make an existing repo more agent-friendly without one-shot rewrites. + +## Core goal + +If required dependencies are not installed in the current environment, bootstrap them first: + +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/zach-stack` +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/configure-codex` + +Set up harness/tooling in small, reversible steps that make the repo more verifiable and easier for agents to work with. + +## Recommended flow + +1. Baseline assessment: detect current stack, docs state, tests, lints, and CI. +2. Harness setup phase: add/update only low-risk structure first (`docs/`, `Justfile`, command discoverability, test map). +3. Apply `zach-stack`-aligned improvements in small batches: + - tooling defaults relevant to current stack + - testing additions by priority + - docs upkeep and file ownership mapping +4. Validate each phase. +5. Pause with explicit next milestone. + +## Hard rules + +- No broad migration in one run unless explicitly authorized. +- Prefer additive changes and stable commit points. +- If unknown/legacy constraints exist, defer and document them instead of forcing defaults. 
+- Ask before any environment or rules-file changes that require trust-sensitive decisions. + +## References + +- `references/incremental-harness.md` +- `references/convergence-phases.md` +- `references/rollback-and-safety.md` +- `../zach-stack` for target conventions. diff --git a/.codex/skills/agentify-repo/agents/openai.yaml b/.codex/skills/agentify-repo/agents/openai.yaml new file mode 100644 index 00000000..c76ca4e4 --- /dev/null +++ b/.codex/skills/agentify-repo/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Agentify Repo" + short_description: "Incrementally harden existing repos with zach-stack conventions." + default_prompt: "Use $agentify-repo to add agent-friendly tooling in safe milestones." diff --git a/.codex/skills/agentify-repo/references/convergence-phases.md b/.codex/skills/agentify-repo/references/convergence-phases.md new file mode 100644 index 00000000..78a4ae3f --- /dev/null +++ b/.codex/skills/agentify-repo/references/convergence-phases.md @@ -0,0 +1,19 @@ +# Convergence phases + +## Phase 0: Discovery + +- Detect existing stack and constraints. +- Record what cannot be changed safely in one pass. + +## Phase 1: Surface hardening + +- Docs + Justfile + lightweight test discoverability. +- Add project-specific lint/test quick checks. + +## Phase 2: Toolchain alignment + +- Add `zach-stack` defaults where they do not disrupt current architecture. + +## Phase 3+: Optional deepening + +- Introduce additional stack pieces (workspaces, visualization layout, docs automation, etc.) as explicit follow-up passes. diff --git a/.codex/skills/agentify-repo/references/incremental-harness.md b/.codex/skills/agentify-repo/references/incremental-harness.md new file mode 100644 index 00000000..1ec9b7c1 --- /dev/null +++ b/.codex/skills/agentify-repo/references/incremental-harness.md @@ -0,0 +1,16 @@ +# Incremental harness setup + +## Purpose + +Build the operating surface needed for agents before enforcing deeper stack shifts. 
+ +## Initial changes (safe first) + +- Add/normalize `docs/` with a current file-map and runbook. +- Add/refresh `Justfile` command palette. +- Introduce a lightweight test index and smoke test list. +- Clarify command/tool ownership in docs. + +## Principle + +The harness layer should reduce uncertainty first: if agents can discover and run stable commands, deeper changes are cheaper and safer. diff --git a/.codex/skills/agentify-repo/references/rollback-and-safety.md b/.codex/skills/agentify-repo/references/rollback-and-safety.md new file mode 100644 index 00000000..ce23b55b --- /dev/null +++ b/.codex/skills/agentify-repo/references/rollback-and-safety.md @@ -0,0 +1,20 @@ +# Rollback and safety + +## Why it matters + +Incremental changes are useful only if each step is reversible. + +## Safety pattern + +- Make one conceptual category of change at a time. +- Validate commands and tests after each change set. +- Record a simple checkpoint note: + - What changed + - Why it changed + - How to revert + +## Example rollback triggers + +- Unexpected behavioral diff +- Test coverage mismatch introduced by command changes +- Documentation or automation drift diff --git a/.codex/skills/configure-codex/SKILL.md b/.codex/skills/configure-codex/SKILL.md new file mode 100644 index 00000000..ec80204b --- /dev/null +++ b/.codex/skills/configure-codex/SKILL.md @@ -0,0 +1,38 @@ +--- +name: configure-codex +description: "Conservatively configure `.codex/rules/default.rules` and Codex environment setup for repo and user ergonomics." +--- + +# Configure Codex + +Use this skill when asked to update Codex rules, command permissions, or environment setup for repository work. + +## Scope +- `.codex/rules/default.rules` updates. +- Command allow-list selection. +- Codex environment/worktree ergonomics. +- Guidance for repo-scoped vs user-scoped settings. + +## Guarded workflow + +1. Inventory current constraints and user intent. +2. 
Propose only relevant allowed command families (e.g., uv, just, pnpm, gh, glab). +3. Ask for confirmation before writing rule updates. +4. Apply minimally and explain impact. +5. Validate the new command surface and leave a follow-up note. + +## Conservative defaults + +- Keep allow-lists minimal. +- Prefer repo-only permissions over broad global overrides. +- Add no commands that are not used by the project. + +## Troubleshooting + +- If `.codex/rules` is not writable in the current workspace, update the user-level `/Users/zach.parent/.codex/rules/default.rules` after confirming scope and keep that in notes. + +## References + +- `references/rules-default.md` +- `references/allowed-commands-matrix.md` +- `references/codex-environment.md` diff --git a/.codex/skills/configure-codex/agents/openai.yaml b/.codex/skills/configure-codex/agents/openai.yaml new file mode 100644 index 00000000..ffaf935d --- /dev/null +++ b/.codex/skills/configure-codex/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Configure Codex" + short_description: "Set up codex rules and environment configuration safely." + default_prompt: "Use $configure-codex to propose and apply safe Codex configuration updates." diff --git a/.codex/skills/configure-codex/references/allowed-commands-matrix.md b/.codex/skills/configure-codex/references/allowed-commands-matrix.md new file mode 100644 index 00000000..a05fde93 --- /dev/null +++ b/.codex/skills/configure-codex/references/allowed-commands-matrix.md @@ -0,0 +1,17 @@ +# Allowed command matrix (`configure-codex`) + +## Always avoid by default + +- Blanket escalation of broad shell commands. +- Universal global allow-lists without project need. + +## Recommended matrix by project type + +- **Web-first**: `pnpm`, `just`, `gh`, optional `uv`. +- **Python-first**: `uv`, `just`, `gh`, optional `glab`. +- **Mixed**: web + python commands only where actively used. 
+ +## Confirmation requirements + +- Ask before enabling commands not currently used by the repo. +- Keep user-visible logs of every newly enabled command. diff --git a/.codex/skills/configure-codex/references/codex-environment.md b/.codex/skills/configure-codex/references/codex-environment.md new file mode 100644 index 00000000..418065e9 --- /dev/null +++ b/.codex/skills/configure-codex/references/codex-environment.md @@ -0,0 +1,19 @@ +# Codex environment and worktree setup + +## Objective + +Make agent work predictable and repeatable in local and customer environments. + +## Setup areas + +- command presets for project startup +- worktree naming and path strategy +- reusable command sequences via `Justfile` +- lightweight onboarding docs for Codex behavior + +## Recommendations + +- Keep environment setup documented in `docs/`. +- Separate repo updates from user-profile updates. +- Validate environment commands after each phase. +- Use conservative defaults and escalate only as needed. diff --git a/.codex/skills/configure-codex/references/rules-default.md b/.codex/skills/configure-codex/references/rules-default.md new file mode 100644 index 00000000..ffdcd324 --- /dev/null +++ b/.codex/skills/configure-codex/references/rules-default.md @@ -0,0 +1,47 @@ +# `.codex/rules/default.rules` guidance + +## Principles + +- Keep permissions scoped to the project workflow. +- Prefer minimal, explicit commands. +- Ask before writing any potentially sensitive settings. + +## Typical command families + +- `uv`: Python install/test/runtime commands +- `just`: repeated repo task orchestration +- `pnpm`: node tooling for web apps +- `gh`: GitHub workflows and issue/pr management +- `glab`: GitLab/GLab workflow parity where used + +## Process + +- Propose a diff, confirm with user, then apply. +- Re-check the rule set after each commit boundary. 
+## Example `prefix_rule` syntax + +```python +prefix_rule( + pattern = ["uv", "sync"], + decision = "allow", + justification = "Allow project dependency sync outside sandbox", + match = [ + "uv sync", + "uv sync --locked", + ], + not_match = ["uv run sync"], +) +``` + +Use `match` and `not_match` as inline rule tests so malformed patterns fail fast when the rule file is loaded. + +## Validate the rules file + +Run a rule check after updating `default.rules`: + +```bash +codex execpolicy check --pretty --rules .codex/rules/default.rules -- uv sync +``` + +Use the same command for any command you changed in the allow list. + diff --git a/.codex/skills/init-repo/SKILL.md b/.codex/skills/init-repo/SKILL.md new file mode 100644 index 00000000..612d19e1 --- /dev/null +++ b/.codex/skills/init-repo/SKILL.md @@ -0,0 +1,40 @@ +--- +name: init-repo +description: "Initialize a new repo with a fast, testable, documented, agent-friendly setup using zach-stack defaults." +--- + +# Init Repo + +Use this skill when a user asks to start a new project or bootstrap an empty repository. + +## Workflow + +If required dependencies are missing in this environment, bootstrap them first: + +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/zach-stack` +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/configure-codex` + +1. Capture constraints and ask clarifying questions only if ambiguous. +2. Choose a stack profile: web, python, visualization, or mixed. +3. Create a phase-1 scaffold that is minimal but complete: + - `docs/` with setup and file map starter. + - `Justfile` with repeatable agent commands. + - Testing skeleton (unit + at least one integration check). + - Linting/pre-commit baseline for selected stack. + - Optional `.codex/rules/default.rules` proposal via `configure-codex`. +4. 
Add technology-specific defaults from `zach-stack`: + - web defaults (framework-light vs lightweight React) + - Python defaults (`uv`) + - workspace structure if mixed. +5. Define the first milestone commit boundary. + +## Guardrails + +- Keep `init-repo` incremental: prefer Phase 1 completion over full build-out. +- Default to minimal files and explain what is deferred. +- Never write sensitive agent/environment rules without explicit user confirmation. + +## References + +- Use `references/new-repo-runbook.md` for scaffold templates and command defaults. +- Use `../zach-stack` for stack-specific decisions. diff --git a/.codex/skills/init-repo/agents/openai.yaml b/.codex/skills/init-repo/agents/openai.yaml new file mode 100644 index 00000000..3aa15dd0 --- /dev/null +++ b/.codex/skills/init-repo/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Init Repo" + short_description: "Bootstrap a new repo with testable and agent-friendly defaults." + default_prompt: "Use $init-repo to scaffold a new repository with zach-stack conventions." diff --git a/.codex/skills/init-repo/references/new-repo-runbook.md b/.codex/skills/init-repo/references/new-repo-runbook.md new file mode 100644 index 00000000..d89c68c0 --- /dev/null +++ b/.codex/skills/init-repo/references/new-repo-runbook.md @@ -0,0 +1,21 @@ +# New repo runbook + +## Phase 1 scaffold (minimum) + +- Create `docs/` and add a compact onboarding file. +- Add `Justfile` with build/test/lint commands. +- Add dependency/tool defaults for selected stack (`pnpm` for web, `uv` for Python). +- Add testing skeleton for targeted unit and integration coverage. +- Add workspace/docs conventions in notes for future phases. + +## Recommended first tasks + +- Choose stack from `zach-stack` profiles. +- Add contributor-level checklist for tests and docs updates. +- Add a clean rollback point before changing environment/config files. 
+ +## Suggested outputs by stack + +- Web: Tailwind baseline + lightweight project setup + test/check commands. +- Python: `pyproject.toml`/`uv` convention + basic test command. +- Mixed: two-phase plan with clear module boundaries and shared-data workspace notes. diff --git a/.codex/skills/zach-stack/SKILL.md b/.codex/skills/zach-stack/SKILL.md new file mode 100644 index 00000000..ca1398ab --- /dev/null +++ b/.codex/skills/zach-stack/SKILL.md @@ -0,0 +1,58 @@ +--- +name: zach-stack +description: "Define opinionated conventions for testable, well-documented, agent-friendly repos. Use for stack selection and application defaults." +--- + +# Zach-Stack + +Use this skill when asked to define or apply a preferred stack for a new/existing project. + +## Purpose + +`zach-stack` is the compact decision source for: +- project scaffolding defaults +- tooling conventions +- test strategy expectations +- docs/lifecycle defaults +- Codex-friendly repository structure + +## When to use + +- before or during `init-repo` +- during `agentify-repo` as the target conventions +- when a team member asks for your preferred stack defaults + +## Core rules (high signal) + +- **Minimal web by default**: prefer plain HTML/CSS/JS when requirements are simple. +- **Dynamic web**: choose a lightweight React setup when needed for stateful interactions. +- **Complex static sites**: prefer Eleventy. +- **CSS**: default to Tailwind. +- **Python work**: use `uv` for package management and virtual environments. +- **Workspaces**: prefer workspace-based organization for multi-module projects. +- **CLI stack**: prefer **Typer** for new CLIs; use **Click** when existing codebases already use it. +- **Testing**: add targeted unit tests and at least one integration/acceptance layer. +- **Frontend checks**: include automated checks that cover real user workflows. +- **Playwright path**: use Playwright CLI for exploration and flow debugging; use Playwright-driven E2E where applicable. 
+- **Visualization**: prefer Streamlit + Plotly for analytics UI. +- **Project shape for shared data**: use a dedicated workspace/module for data, plus workspace boundaries for Streamlit/compute when needed. +- **Standalone scripts**: keep CLIs standalone with minimal dependencies and explicit script-level dependency boundaries; keep CLIs minimally scoped. +- **Docs as source of truth**: every project must have `docs/` and keep it current with code changes. +- **Automation**: include `Justfile` in most repos for repeatable agent tasks. +- **Pre-commit**: always define pre-commit via project-native tooling (`pnpm` or `uv`). + +## Process + +1. Ask for project intent: web app, python service, data app, or mixed. +2. Select a minimal stack from these preferences. +3. Confirm constraints that override defaults (security, infra, legacy platform). +4. Apply only what is relevant for the project phase. + +## References + +- `references/web.md` for web defaults, CSS, and Playwright use. +- `references/python.md` for UV, workspace, and Python conventions. +- `references/testing.md` for test structure expectations. +- `references/docs.md` for living docs patterns. +- `references/workspaces.md` for shared workspaces in mixed stacks. +- `references/resources.md` for external source references. diff --git a/.codex/skills/zach-stack/agents/openai.yaml b/.codex/skills/zach-stack/agents/openai.yaml new file mode 100644 index 00000000..12336df6 --- /dev/null +++ b/.codex/skills/zach-stack/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Zach Stack" + short_description: "Opinionated stack defaults for agent-friendly repos." + default_prompt: "Use $zach-stack to define technology and workflow defaults for the repo." 
diff --git a/.codex/skills/zach-stack/references/docs.md b/.codex/skills/zach-stack/references/docs.md new file mode 100644 index 00000000..4479841d --- /dev/null +++ b/.codex/skills/zach-stack/references/docs.md @@ -0,0 +1,19 @@ +# Documentation conventions (`zach-stack`) + +## Minimum + +- Every repo must include `docs/`. +- Docs should include a file index/structure map and quick update points. +- Keep docs updated as part of meaningful code changes. + +## Practical expectations + +- Maintain short-lived markdown docs that stay current with code. +- Include architecture and setup notes in `docs/` for onboarding. +- Keep one canonical source-of-truth for setup commands and developer workflows. +- Add a short, versioned list of files and responsibility in `docs/file-map.md` (or equivalent). + +## Docs quality and checks + +- Use docs linting where practical to catch stale references. +- Treat docs quality as part of completion criteria before considering a change done. diff --git a/.codex/skills/zach-stack/references/python.md b/.codex/skills/zach-stack/references/python.md new file mode 100644 index 00000000..3807a99e --- /dev/null +++ b/.codex/skills/zach-stack/references/python.md @@ -0,0 +1,33 @@ +# Python conventions (`zach-stack`) + +## Scope +Use for Python-heavy or mixed repos with Python services/modules. + +## Tooling defaults + +- Use `uv` for dependency/developer workflow. +- Prefer workspace separation when there are multiple independent Python modules. +- Use a dedicated `workspace` structure when teams need clean boundaries for CLI, service, library, and jobs. + +## CLI conventions + +- Prefer **Typer** for new CLI development. +- Accept **Click** when existing codepaths or existing teams standardize on Click. +- Keep CLI entrypoints near interfaces and minimize coupling to service internals. 
+- Make scripts standalone where practical: + - put CLI dependency declarations in the nearest packaging boundary + - avoid importing large module stacks at module import time + - keep defaults explicit to reduce runtime surprises + +## App architecture recommendations + +- If data is consumed by both core services and visualization: + - create a separate data workspace/project for shared contracts and loaders + - create separate consumer workspaces (for example API layer, Streamlit app) +- For data visualization: use Streamlit + Plotly. +- If the project is small and one-purpose, keep one workspace and add explicit module boundaries. + +## Maintenance and tests + +- Keep unit tests near implementation modules. +- Add integration tests for external boundaries (HTTP handlers, file contracts, job inputs). diff --git a/.codex/skills/zach-stack/references/resources.md b/.codex/skills/zach-stack/references/resources.md new file mode 100644 index 00000000..bbd5eacb --- /dev/null +++ b/.codex/skills/zach-stack/references/resources.md @@ -0,0 +1,13 @@ +# External references for `zach-stack` + +- [Agent Skills homepage](https://agentskills.io/home) +- [Agent Skills specification](https://agentskills.io/specification) +- [What are Skills?](https://agentskills.io/what-are-skills) +- [Harness engineering at OpenAI](https://openai.com/index/harness-engineering/) +- [Testing Agent Skills with Evals](https://developers.openai.com/blog/eval-skills/) + +## Why these references are here + +- Keep SKILL files concise and point to standards-based context when needed. +- Encourage incremental, low-friction automation. +- Reinforce evaluation and quality checks for repeatable agent behavior. 
diff --git a/.codex/skills/zach-stack/references/testing.md b/.codex/skills/zach-stack/references/testing.md new file mode 100644 index 00000000..e716a44b --- /dev/null +++ b/.codex/skills/zach-stack/references/testing.md @@ -0,0 +1,45 @@ +# Testing conventions (`zach-stack`) + +## CLI strategy defaults + +- **CLI frameworks**: prefer `Typer` for new CLI implementations. +- **CLI alternatives**: `Click` is acceptable for existing or click-first codebases and when explicit subcommand ergonomics are required. +- **Dependency hygiene**: keep CLI modules lightweight; make scripts as self-contained and dependency-bounded as possible by: + - placing entrypoints in dedicated files/modules + - scoping dependencies to the package/script boundary + - avoiding monolithic "utility" scripts with broad transitive imports + +## Test expectations by framework + +- Add focused unit tests for parsing, argument validation, and command routing. +- Add integration-style tests for command side effects (filesystem, subprocess calls, exit codes). +- For **Typer** CLIs: + - test commands with `typer.testing.CliRunner` (Typer builds on Click's test runner internally) + - verify exit codes and output + - verify help text and global options +- For **Click** CLIs: + - test via `click.testing.CliRunner` + - validate success and failure paths + - include edge-case tests around argument parsing and exit codes +- For both, assert deterministic outputs where possible. + +## Frontend checks + +- For UI work, include interaction-level validation against real page behavior (manual Playwright flow check and automated checks where feasible). + +## Docs/test coupling + +- Every major behavior change should include an updated doc entry describing intent and validation command. +- Avoid relying on broad integration tests only; use focused tests for failure isolation. 
+ +## CLI command style for single-command scripts + +- For scripts intended to have a single action, prefer exposing the behavior on the default command and avoid a dedicated `run` subcommand label. +- This keeps invocation simple for agents: `uv run scripts/<script-name>.py`. + +## Examples + +- `uv run scripts/validate-custom-skills.py` (default entrypoint) +- `uv run scripts/run-skill-evals.py` +- `uv run scripts/test-custom-skills.py` +- `uv run scripts/sync-custom-skill.py sync --dry-run` diff --git a/.codex/skills/zach-stack/references/web.md b/.codex/skills/zach-stack/references/web.md new file mode 100644 index 00000000..e7c7e814 --- /dev/null +++ b/.codex/skills/zach-stack/references/web.md @@ -0,0 +1,28 @@ +# Web conventions (`zach-stack`) + +## Scope +Use when a project includes frontend code and needs a consistent, lightweight stack. + +## Conventions + +- Use lightweight, framework-free implementations for simple pages. +- For dynamic client behavior, prefer lightweight React over heavy abstraction layers. +- Use Tailwind for styling defaults. +- For content-heavy or markdown-driven complex static sites, use Eleventy. +- Use `pnpm` as package manager by default for web tooling. +- Keep browser-facing code testable with at least: + - unit-level checks for pure functions/components (or equivalent) + - integration tests for user flows + - end-to-end checks for critical UI paths + +## Playwright and checks + +- For interactive UI exploration and triage, use Playwright CLI first. +- For stable e2e coverage, standardize on Playwright-based automation in CI for key flows. +- For Python-facing web UIs, include the Python Playwright stack where helpful. +- Keep selectors and test semantics resilient to refactors. + +## Decision points + +- If no interactive state and no component orchestration are needed: no React. +- If there are interactive widgets, state-driven forms, or realtime updates: use React. 
diff --git a/.codex/skills/zach-stack/references/workspaces.md b/.codex/skills/zach-stack/references/workspaces.md new file mode 100644 index 00000000..554254eb --- /dev/null +++ b/.codex/skills/zach-stack/references/workspaces.md @@ -0,0 +1,12 @@ +# Workspace conventions (`zach-stack`) + +## Rationale + +Use workspaces to separate concerns, reduce coupling, and support agent navigation. + +## Typical patterns + +- `workspace` for source code organization when multiple modules are expected. +- Separate project for shared data access layer if data feeds multiple components. +- Separate Streamlit app workspace for visualization and UX. +- Keep dependency graphs explicit and directional (core < data < apps). diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml new file mode 100644 index 00000000..7b40db40 --- /dev/null +++ b/.github/workflows/check.yml @@ -0,0 +1,47 @@ +name: check + +on: + pull_request: + push: + branches: + - main + +jobs: + check: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Set up uv + uses: astral-sh/setup-uv@v7 + with: + version: 0.10.0 + enable-cache: true + + - name: Install project dependencies + run: uv sync --locked + + - name: Install pre-commit hooks + run: uv run pre-commit install --install-hooks + + - name: Run pre-commit checks + run: uv run pre-commit run --all-files + + - name: Run type checks + run: uv run mypy + + - name: Run skill tests + run: uv run scripts/test-custom-skills.py + + - name: Run skill evals + run: uv run scripts/run-skill-evals.py + + - name: Prune uv cache for CI + if: always() + run: uv cache prune --ci diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..964ce0cf --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,33 @@ +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.6.4 + hooks: + 
- id: ruff + name: ruff lint + args: ["--fix", "--exit-non-zero-on-fix"] + - id: ruff-format + name: ruff format + + - repo: local + hooks: + - id: skills-lint + name: custom skills lint/tests + entry: ./scripts/test-custom-skills.py + language: system + pass_filenames: false + + - repo: local + hooks: + - id: skills-evals + name: custom skill evals + entry: ./scripts/run-skill-evals.py + language: system + pass_filenames: false + + - repo: local + hooks: + - id: mypy-scripts + name: mypy script type check + entry: uv run mypy + language: system + files: ^scripts/.*\.py$ diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..fadac2a0 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,22 @@ +# Repository Agents Instructions + +- Purpose: this repository is a personal fork of the OpenAI Skills catalog plus a `.custom` namespace for your own workflows. +- Scope: keep the public skill catalogs untouched unless explicitly requested. +- Keep new skills focused on role and workflow, and place detailed guidance under `references/`. +- Prefer concise SKILL files with one-level references. +- Use incremental changes for this skills suite and commit in logical milestones. +- Do not add scripts unless they are essential and deterministic. +- No README/CHANGELOG style collateral is required inside a skill. + +## Linting and tests + +Run `uv run scripts/test-custom-skills.py` before committing `.custom` changes. + +Install pre-commit hooks once with: +`PRE_COMMIT_HOME=.pre-commit-cache pre-commit install --install-hooks`. + +Run pre-commit checks: +`pre-commit run --all-files` (or `just pre-commit-check`). + +Hooks install command in this repo uses a local pre-commit home: +`PRE_COMMIT_HOME=.pre-commit-cache pre-commit install --install-hooks`. 
diff --git a/evals/agentify-repo/prompt_set.csv b/evals/agentify-repo/prompt_set.csv new file mode 100644 index 00000000..ebcc7e71 --- /dev/null +++ b/evals/agentify-repo/prompt_set.csv @@ -0,0 +1,4 @@ +prompt_id,prompt +incremental_harness_existing_repo,"Make this existing repo more agent-friendly, but do it incrementally and avoid risky broad rewrites." +legacy_constraints_present,"Harden a legacy repo for agents. There are unknown constraints and we need reversible changes." +rules_change_request,"Improve agent workflow in a repo and consider codex/rules changes only if necessary." diff --git a/evals/agentify-repo/test.jsonl b/evals/agentify-repo/test.jsonl new file mode 100644 index 00000000..941603e2 --- /dev/null +++ b/evals/agentify-repo/test.jsonl @@ -0,0 +1,3 @@ +{"case_id":"skill-structure","prompt_id":"incremental_harness_existing_repo","skill":"agentify-repo","checks":[{"kind":"file_exists","path":"SKILL.md"},{"kind":"file_exists","path":"agents/openai.yaml"},{"kind":"file_exists","path":"references/incremental-harness.md"},{"kind":"file_exists","path":"references/convergence-phases.md"},{"kind":"file_exists","path":"references/rollback-and-safety.md"},{"kind":"frontmatter_field_equals","path":"SKILL.md","field":"name","value":"agentify-repo"},{"kind":"contains_text","path":"agents/openai.yaml","text":"$agentify-repo"}]} +{"case_id":"incremental-flow-and-guardrails","prompt_id":"legacy_constraints_present","skill":"agentify-repo","checks":[{"kind":"contains_all","path":"SKILL.md","texts":["Baseline assessment: detect current stack, docs state, tests, lints, and CI.","Harness setup phase: add/update only low-risk structure first","Apply `zach-stack`-aligned improvements in small batches","No broad migration in one run unless explicitly authorized.","Prefer additive changes and stable commit points.","If unknown/legacy constraints exist, defer and document them instead of forcing defaults."]}]} 
+{"case_id":"trust-sensitive-rules-changes","prompt_id":"rules_change_request","skill":"agentify-repo","checks":[{"kind":"contains_text","path":"SKILL.md","text":"Ask before any environment or rules-file changes that require trust-sensitive decisions."},{"kind":"contains_text","path":"SKILL.md","text":"../zach-stack"}]} diff --git a/evals/configure-codex/prompt_set.csv b/evals/configure-codex/prompt_set.csv new file mode 100644 index 00000000..97d9c7f9 --- /dev/null +++ b/evals/configure-codex/prompt_set.csv @@ -0,0 +1,4 @@ +prompt_id,prompt +minimal_rules_update,"Update Codex rules for this repo, but keep the allowed commands minimal and only what the project needs." +repo_vs_user_scope,"Set up Codex configuration for a repo, and explain when repo-scoped vs user-scoped settings should be used." +rules_dir_not_writable,"Configure Codex, but assume the repo .codex/rules path is not writable." diff --git a/evals/configure-codex/test.jsonl b/evals/configure-codex/test.jsonl new file mode 100644 index 00000000..660af237 --- /dev/null +++ b/evals/configure-codex/test.jsonl @@ -0,0 +1,3 @@ +{"case_id":"skill-structure","prompt_id":"minimal_rules_update","skill":"configure-codex","checks":[{"kind":"file_exists","path":"SKILL.md"},{"kind":"file_exists","path":"agents/openai.yaml"},{"kind":"file_exists","path":"references/rules-default.md"},{"kind":"file_exists","path":"references/allowed-commands-matrix.md"},{"kind":"file_exists","path":"references/codex-environment.md"},{"kind":"frontmatter_field_equals","path":"SKILL.md","field":"name","value":"configure-codex"},{"kind":"contains_text","path":"agents/openai.yaml","text":"$configure-codex"}]} +{"case_id":"guarded-workflow-and-minimal-permissions","prompt_id":"minimal_rules_update","skill":"configure-codex","checks":[{"kind":"contains_all","path":"SKILL.md","texts":[".codex/rules/default.rules","Propose only relevant allowed command families","Ask for confirmation before writing rule updates.","Keep allow-lists 
minimal.","Prefer repo-only permissions over broad global overrides.","Add no commands that are not used by the project."]}]} +{"case_id":"fallback-user-level-rules-path","prompt_id":"rules_dir_not_writable","skill":"configure-codex","checks":[{"kind":"contains_text","path":"SKILL.md","text":"/Users/zach.parent/.codex/rules/default.rules"},{"kind":"contains_text","path":"SKILL.md","text":"after confirming scope"}]} diff --git a/evals/init-repo/prompt_set.csv b/evals/init-repo/prompt_set.csv new file mode 100644 index 00000000..18824d33 --- /dev/null +++ b/evals/init-repo/prompt_set.csv @@ -0,0 +1,4 @@ +prompt_id,prompt +bootstrap_python_repo,"Start a new Python project and scaffold a minimal but complete phase-1 repository for agent-friendly work." +bootstrap_mixed_repo,"Initialize a mixed project repo with docs, testing, and repeatable commands, but keep it incremental." +rules_confirmation_needed,"Bootstrap a new repo and suggest Codex rules only if the user explicitly confirms." diff --git a/evals/init-repo/test.jsonl b/evals/init-repo/test.jsonl new file mode 100644 index 00000000..93ac8736 --- /dev/null +++ b/evals/init-repo/test.jsonl @@ -0,0 +1,3 @@ +{"case_id":"skill-structure","prompt_id":"bootstrap_python_repo","skill":"init-repo","checks":[{"kind":"file_exists","path":"SKILL.md"},{"kind":"file_exists","path":"agents/openai.yaml"},{"kind":"file_exists","path":"references/new-repo-runbook.md"},{"kind":"frontmatter_field_equals","path":"SKILL.md","field":"name","value":"init-repo"},{"kind":"contains_text","path":"agents/openai.yaml","text":"$init-repo"}]} +{"case_id":"phase1-scaffold-expectations","prompt_id":"bootstrap_mixed_repo","skill":"init-repo","checks":[{"kind":"contains_all","path":"SKILL.md","texts":["Choose a stack profile: web, python, visualization, or mixed.","Create a phase-1 scaffold that is minimal but complete:","`docs/` with setup and file map starter.","`Justfile` with repeatable agent commands.","Testing skeleton (unit + at least one 
integration check).","Linting/pre-commit baseline for selected stack.","Add technology-specific defaults from `zach-stack`:"]}]} +{"case_id":"rules-safety-and-zach-stack-linkage","prompt_id":"rules_confirmation_needed","skill":"init-repo","checks":[{"kind":"contains_text","path":"SKILL.md","text":"Optional `.codex/rules/default.rules` proposal via `configure-codex`."},{"kind":"contains_text","path":"SKILL.md","text":"Never write sensitive agent/environment rules without explicit user confirmation."},{"kind":"contains_text","path":"SKILL.md","text":"Use `../zach-stack` for stack-specific decisions."}]} diff --git a/evals/zach-stack/prompt_set.csv b/evals/zach-stack/prompt_set.csv new file mode 100644 index 00000000..8756e00c --- /dev/null +++ b/evals/zach-stack/prompt_set.csv @@ -0,0 +1,4 @@ +prompt_id,prompt +choose_stack_defaults,"What stack defaults should we use for a new project so it stays testable and agent-friendly?" +web_and_python_preferences,"Summarize your preferred web and Python tooling conventions for a repo." +data_app_recommendation,"Recommend a stack for an analytics/data app with shared data and a lightweight UI." 
diff --git a/evals/zach-stack/test.jsonl b/evals/zach-stack/test.jsonl new file mode 100644 index 00000000..5da11889 --- /dev/null +++ b/evals/zach-stack/test.jsonl @@ -0,0 +1,3 @@ +{"case_id":"skill-structure","prompt_id":"choose_stack_defaults","skill":"zach-stack","checks":[{"kind":"file_exists","path":"SKILL.md"},{"kind":"file_exists","path":"agents/openai.yaml"},{"kind":"file_exists","path":"references/web.md"},{"kind":"file_exists","path":"references/python.md"},{"kind":"file_exists","path":"references/testing.md"},{"kind":"file_exists","path":"references/docs.md"},{"kind":"file_exists","path":"references/workspaces.md"},{"kind":"file_exists","path":"references/resources.md"},{"kind":"frontmatter_field_equals","path":"SKILL.md","field":"name","value":"zach-stack"},{"kind":"contains_text","path":"agents/openai.yaml","text":"$zach-stack"}]} +{"case_id":"core-defaults-cover-key-technologies","prompt_id":"web_and_python_preferences","skill":"zach-stack","checks":[{"kind":"contains_all","path":"SKILL.md","texts":["Minimal web by default","Tailwind","use `uv` for package management and virtual environments.","prefer **Typer** for new CLIs; use **Click**","Playwright","prefer Streamlit + Plotly for analytics UI.","include `Justfile` in most repos","always define pre-commit via project-native tooling"]}]} +{"case_id":"process-and-reference-surface","prompt_id":"data_app_recommendation","skill":"zach-stack","checks":[{"kind":"contains_all","path":"SKILL.md","texts":["Ask for project intent: web app, python service, data app, or mixed.","Confirm constraints that override defaults","`references/web.md`","`references/python.md`","`references/testing.md`","`references/docs.md`","`references/workspaces.md`","`references/resources.md`"]}]} diff --git a/justfile b/justfile new file mode 100644 index 00000000..1b9a7ea9 --- /dev/null +++ b/justfile @@ -0,0 +1,26 @@ +default: + @just --list + +pre-commit: + uv run pre-commit install --install-hooks + +pre-commit-check: + uv run 
def parse_frontmatter(text: str) -> dict[str, str]:
    """Extract simple key/value pairs from a leading YAML frontmatter block.

    Returns an empty dict when the text does not open with a ``---`` fence or
    the closing fence is missing. Surrounding double quotes on values are
    stripped; nested YAML is intentionally not supported.
    """
    if not text.startswith("---\n"):
        return {}
    pieces = text.split("---", 2)
    if len(pieces) < 3:
        return {}
    fields: dict[str, str] = {}
    for raw in pieces[1].splitlines():
        # partition() yields an empty separator when the line has no colon.
        key, sep, value = raw.partition(":")
        if not sep:
            continue
        fields[key.strip()] = value.strip().strip('"')
    return fields
skill_dir, rel_path) + if content is None: + failures.append(f"{label}: {skill_name} missing file {target.relative_to(REPO)}") + return 0, failures + if not texts: + failures.append(f"{label}: {skill_name} contains_all check has no texts") + return 0, failures + missing = [needle for needle in texts if needle not in content] + if not missing: + return 1, [] + failures.append( + f"{label}: {skill_name} missing {len(missing)} expected text(s) in {rel_path}: " + + ", ".join(repr(item) for item in missing) + ) + return 0, failures + + if kind == "frontmatter_field_equals": + rel_path = str(check.get("path", "SKILL.md")).strip() + field = str(check.get("field", "")).strip() + expected = str(check.get("value", "")) + target, content = _read_text_cached(text_cache, skill_dir, rel_path) + if content is None: + failures.append(f"{label}: {skill_name} missing file {target.relative_to(REPO)}") + return 0, failures + frontmatter = parse_frontmatter(content) + actual = frontmatter.get(field) + if field and actual == expected: + return 1, [] + failures.append( + f"{label}: {skill_name} frontmatter {field!r} expected {expected!r}, got {actual!r}" + ) + return 0, failures + + failures.append(f"{label}: {skill_name} unknown check type {kind!r}") + return passed, failures + + +def load_prompt_set(path: Path) -> dict[str, dict[str, str]]: + with path.open(newline="") as handle: + reader = csv.DictReader(handle) + fieldnames = set(reader.fieldnames or []) + required = {"prompt_id", "prompt"} + missing_headers = sorted(required - fieldnames) + if missing_headers: + raise SystemExit( + f"FAIL: {path.relative_to(REPO)} missing CSV headers: {', '.join(missing_headers)}" + ) + + prompts: dict[str, dict[str, str]] = {} + for index, row in enumerate(reader, start=2): + prompt_id = (row.get("prompt_id") or "").strip() + prompt = (row.get("prompt") or "").strip() + if not prompt_id: + raise SystemExit(f"FAIL: {path.relative_to(REPO)} row {index} missing prompt_id") + if not prompt: + raise 
def load_test_cases(path: Path) -> list[dict[str, Any]]:
    """Parse a JSONL eval file into a list of case dicts.

    Blank lines are skipped. Exits with a FAIL message on malformed JSON,
    on rows that are not JSON objects, and when no cases are found at all.
    """
    cases: list[dict[str, Any]] = []
    all_lines = path.read_text().splitlines()
    for index, raw_line in enumerate(all_lines, start=1):
        stripped = raw_line.strip()
        if not stripped:
            continue
        try:
            parsed = json.loads(stripped)
        except json.JSONDecodeError as exc:
            raise SystemExit(
                f"FAIL: {path.relative_to(REPO)} line {index}: invalid JSON ({exc})"
            ) from exc
        if not isinstance(parsed, dict):
            raise SystemExit(f"FAIL: {path.relative_to(REPO)} line {index}: expected JSON object")
        cases.append(parsed)
    if not cases:
        raise SystemExit(f"FAIL: {path.relative_to(REPO)} has no test cases")
    return cases
str(case.get("skill", suite_name)).strip() + checks_raw = case.get("checks", []) + checks = ( + [item for item in checks_raw if isinstance(item, dict)] + if isinstance(checks_raw, list) + else [] + ) + + case_failures: list[str] = [] + case_passed = 0 + + if not case_id: + case_failures.append(f"{suite_name}: case missing case_id") + if not prompt_id: + case_failures.append( + f"{suite_name}/{case_id or ''}: missing prompt_id" + ) + elif prompt_id not in prompts: + case_failures.append(f"{suite_name}/{case_id}: unknown prompt_id {prompt_id!r}") + + skill_dir = CUSTOM_SKILLS_DIR / skill_name + if not skill_name: + case_failures.append( + f"{suite_name}/{case_id or ''}: missing skill name" + ) + elif not skill_dir.is_dir(): + case_failures.append( + f"{suite_name}/{case_id}: missing skill directory {skill_dir.relative_to(REPO)}" + ) + + if not checks: + case_failures.append( + f"{suite_name}/{case_id or ''}: case has no checks" + ) + + if not case_failures and skill_name: + for check in checks: + passed, check_failures = run_check( + suite_name=suite_name, + case_id=case_id, + skill_name=skill_name, + skill_dir=skill_dir, + check=check, + text_cache=text_cache, + ) + total_checks += 1 + passed_checks += passed + case_passed += passed + case_failures.extend(check_failures) + else: + total_checks += len(checks) + + case_status = "PASS" if not case_failures else "FAIL" + failures.extend(case_failures) + case_results.append( + { + "case_id": case_id, + "prompt_id": prompt_id, + "skill": skill_name, + "status": case_status, + "passed_checks": case_passed, + "total_checks": len(checks), + "failures": case_failures, + } + ) + + return { + "suite": suite_name, + "status": "PASS" if not failures else "FAIL", + "prompts": len(prompts), + "cases": len(cases), + "passed_checks": passed_checks, + "total_checks": total_checks, + "failures": failures, + "case_results": case_results, + } + + +def run_evals(eval_dir: Path) -> int: + if not eval_dir.exists(): + print(f"FAIL: no evals 
directory found: {eval_dir}") + return 1 + if not eval_dir.is_dir(): + print(f"FAIL: eval path is not a directory: {eval_dir}") + return 1 + + suite_dirs = [ + path + for path in sorted(eval_dir.iterdir()) + if path.is_dir() and not path.name.startswith(".") + ] + if not suite_dirs: + print(f"FAIL: no eval suites found in {eval_dir}") + return 1 + + results = [run_suite(suite_dir) for suite_dir in suite_dirs] + print(json.dumps(results, indent=2)) + + failed = [result for result in results if result["status"] == "FAIL"] + if failed: + for suite in failed: + suite_name = str(suite.get("suite")) + suite_failures = suite.get("failures", []) + if isinstance(suite_failures, list): + for message in suite_failures: + print(f"FAIL {suite_name} :: {message}") + else: + print(f"FAIL {suite_name} :: {suite_failures}") + return 1 + + total_suites = len(results) + total_cases = sum(int(item.get("cases", 0)) for item in results) + total_checks = sum(int(item.get("total_checks", 0)) for item in results) + print( + "PASS: all skill evals passed " + f"({total_suites} suites, {total_cases} cases, {total_checks} checks)" + ) + return 0 + + +@app.callback(invoke_without_command=True) +def main( + eval_dir: Path = typer.Option( + DEFAULT_EVAL_DIR, + "--eval-dir", + help="Path to root eval suites directory.", + ), +) -> None: + """Run root eval suites against `.custom` skills.""" + raise typer.Exit(code=run_evals(eval_dir)) + + +if __name__ == "__main__": + app() diff --git a/scripts/sync-custom-skills.py b/scripts/sync-custom-skills.py new file mode 100755 index 00000000..e831c970 --- /dev/null +++ b/scripts/sync-custom-skills.py @@ -0,0 +1,193 @@ +#!/usr/bin/env -S uv run +# /// script +# requires-python = ">=3.12" +# dependencies = ["typer>=0.12"] +# /// + +"""Compile and verify `.custom` skills vs `.codex/skills` mirror.""" + +from __future__ import annotations + +import shutil +from pathlib import Path + +import typer + +REPO_ROOT = Path(__file__).resolve().parents[1] 
def sync_skills(custom_root: Path, mirror_root: Path, dry_run: bool = False) -> list[str]:
    """Mirror skill directories from `custom_root` into `mirror_root`.

    A directory counts as a skill when it is non-hidden and contains a
    SKILL.md file. Returns the sorted names of skills that were (or, with
    `dry_run`, would be) synced. Raises SystemExit when `custom_root` is
    missing.

    Fix: the original dry-run listing excluded hidden directories while the
    real sync loop did not, so `--dry-run` output could disagree with what
    sync actually copied. Both paths now share one predicate.
    """
    if not custom_root.is_dir():
        raise SystemExit(f"FAIL: custom skills root not found: {custom_root}")

    def _is_skill(path: Path) -> bool:
        # Single source of truth so dry-run output matches the real sync.
        return (
            path.is_dir()
            and not path.name.startswith(".")
            and (path / "SKILL.md").is_file()
        )

    skill_dirs = [entry for entry in sorted(custom_root.iterdir()) if _is_skill(entry)]
    if dry_run:
        return [entry.name for entry in skill_dirs]

    mirror_root.mkdir(parents=True, exist_ok=True)

    # Clear any prior .custom marker in mirror root (symlink, file, or dir).
    legacy_marker = mirror_root / ".custom"
    if legacy_marker.is_symlink() or legacy_marker.exists():
        if legacy_marker.is_dir() and not legacy_marker.is_symlink():
            shutil.rmtree(legacy_marker)
        else:
            legacy_marker.unlink()

    expected: set[str] = set()
    for skill_dir in skill_dirs:
        expected.add(skill_dir.name)
        target = mirror_root / skill_dir.name
        # Replace any stale copy wholesale so deletions inside a skill propagate.
        if target.exists():
            if target.is_dir():
                shutil.rmtree(target)
            else:
                target.unlink()
        shutil.copytree(skill_dir, target)

    # Prune non-hidden mirror entries that no longer exist in the source.
    for entry in sorted(mirror_root.iterdir()):
        if entry.is_dir() and not entry.name.startswith(".") and entry.name not in expected:
            shutil.rmtree(entry)

    return sorted(expected)
def check_skills(custom_root: Path, mirror_root: Path) -> int:
    """Verify that the mirror tree matches the source skills exactly.

    Compares the skill-name sets first, then the per-skill file sets, and
    exits with a FAIL message on the first discrepancy found.
    """
    source = list_skill_dirs(custom_root)
    mirrored = list_skill_dirs(mirror_root)

    if set(source) != set(mirrored):
        missing = sorted(set(source) - set(mirrored))
        extra = sorted(set(mirrored) - set(source))
        if missing:
            raise SystemExit(
                f"FAIL: missing mirrored skills in .codex/skills: {', '.join(missing)}"
            )
        if extra:
            raise SystemExit(
                f"FAIL: unexpected extra skills in .codex/skills: {', '.join(extra)}"
            )

    for name in sorted(source):
        if source[name] != mirrored.get(name, set()):
            raise SystemExit(f"FAIL: mirroring mismatch for skill '{name}'")

    print("PASS: .custom and .codex/skills are mirrored")
    return 0
def run_command(command: list[str]) -> None:
    """Run `command` from the repo root; exit with its return code on failure."""
    exit_code = subprocess.run(command, cwd=ROOT, check=False).returncode
    if exit_code:
        # Propagate the child's exit status (including signal-induced codes).
        raise SystemExit(exit_code)
"scripts/sync-custom-skills.py", "check"]) + + print("PASS: custom skill lint/tests complete") + return 0 + + +@app.callback(invoke_without_command=True) +def main() -> None: + """Run validation + sync/check pipeline.""" + raise typer.Exit(code=run_pipeline()) + + +if __name__ == "__main__": + app() diff --git a/scripts/validate-custom-skills.py b/scripts/validate-custom-skills.py new file mode 100755 index 00000000..e44459a4 --- /dev/null +++ b/scripts/validate-custom-skills.py @@ -0,0 +1,205 @@ +#!/usr/bin/env -S uv run +# /// script +# requires-python = ">=3.12" +# dependencies = ["typer>=0.12"] +# /// + +"""Validation and lightweight lint checks for `.custom` skills.""" + +from __future__ import annotations + +import re +from pathlib import Path + +import typer + +REPO_ROOT = Path(__file__).resolve().parents[1] +CUSTOM_ROOT = REPO_ROOT / "skills" / ".custom" + +REQUIRED_SKILLS = { + "zach-stack": { + "references": { + "web.md", + "python.md", + "testing.md", + "docs.md", + "workspaces.md", + "resources.md", + } + }, + "init-repo": { + "references": { + "new-repo-runbook.md", + } + }, + "agentify-repo": { + "references": { + "incremental-harness.md", + "convergence-phases.md", + "rollback-and-safety.md", + } + }, + "configure-codex": { + "references": { + "rules-default.md", + "allowed-commands-matrix.md", + "codex-environment.md", + } + }, +} + +SKILL_NAME_RE = re.compile(r"^[a-z0-9]+(?:-[a-z0-9]+)*$") + + +def fail(message: str, *, file: Path | None = None) -> None: + if file: + print(f"FAIL: {file}: {message}") + else: + print(f"FAIL: {message}") + raise SystemExit(1) + + +def check_markdown_lint(path: Path) -> None: + content = path.read_text() + if not content.endswith("\n"): + fail("does not end with newline", file=path) + if "\t" in content: + fail("contains tabs", file=path) + + if path.name == "SKILL.md": + if not content.startswith("---\n"): + fail("missing YAML frontmatter start", file=path) + parts = content.split("---", 2) + if len(parts) < 3: + 
def parse_frontmatter(lines: list[str]) -> dict[str, str]:
    """Parse `key: value` pairs from frontmatter lines.

    Blank lines and lines without a colon are ignored; surrounding double
    quotes on values are stripped.
    """
    parsed: dict[str, str] = {}
    for raw in lines:
        key, sep, value = raw.partition(":")
        if not raw.strip() or not sep:
            continue
        parsed[key.strip()] = value.strip().strip('"')
    return parsed
def validate_references(skill_dir: Path, required: set[str]) -> None:
    """Check the skill's references/ directory and lint every markdown file.

    Fails when a required reference file is absent; a missing directory is
    only an error when `required` is non-empty.
    """
    refs_dir = skill_dir / "references"
    if not refs_dir.exists():
        if required:
            fail("missing references directory", file=skill_dir)
        return

    present = {entry.name for entry in refs_dir.glob("*.md") if entry.is_file()}
    absent = required - present
    if absent:
        fail(
            f"missing required references: {', '.join(sorted(absent))}",
            file=refs_dir,
        )

    for markdown_file in refs_dir.glob("*.md"):
        check_markdown_lint(markdown_file)
+ ), +) -> None: + """Run `.custom` skill validation.""" + raise typer.Exit(code=run_validation(custom_root)) + + +if __name__ == "__main__": + app() diff --git a/skills/.custom/AGENTS.md b/skills/.custom/AGENTS.md new file mode 100644 index 00000000..0f4a1ff5 --- /dev/null +++ b/skills/.custom/AGENTS.md @@ -0,0 +1,8 @@ +# .custom Agent Instructions + +This directory contains personal/custom skills used for repo-specific agent workflows. + +- Keep skill definitions opinionated and minimal in `SKILL.md`. +- Put detailed instructions in `references/` files under each skill. +- Maintain one-to-one mirroring between `.custom/` and `.codex/skills/` via sync tooling. +- Run `bash scripts/test-custom-skills.sh` before committing `.custom` updates. diff --git a/skills/.custom/agentify-repo/SKILL.md b/skills/.custom/agentify-repo/SKILL.md new file mode 100644 index 00000000..72c3e571 --- /dev/null +++ b/skills/.custom/agentify-repo/SKILL.md @@ -0,0 +1,42 @@ +--- +name: agentify-repo +description: "Incrementally add harness and tooling to an existing repo so it converges toward zach-stack safely over multiple passes." +--- + +# Agentify Repo + +Use this skill when asked to make an existing repo more agent-friendly without one-shot rewrites. + +## Core goal + +If required dependencies are not installed in the current environment, bootstrap them first: + +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/zach-stack` +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/configure-codex` + +Set up harness/tooling in small, reversible steps that make the repo more verifiable and easier for agents to work with. + +## Recommended flow + +1. Baseline assessment: detect current stack, docs state, tests, lints, and CI. +2. Harness setup phase: add/update only low-risk structure first (`docs/`, `Justfile`, command discoverability, test map). +3. 
Apply `zach-stack`-aligned improvements in small batches: + - tooling defaults relevant to current stack + - testing additions by priority + - docs upkeep and file ownership mapping +4. Validate each phase. +5. Pause with explicit next milestone. + +## Hard rules + +- No broad migration in one run unless explicitly authorized. +- Prefer additive changes and stable commit points. +- If unknown/legacy constraints exist, defer and document them instead of forcing defaults. +- Ask before any environment or rules-file changes that require trust-sensitive decisions. + +## References + +- `references/incremental-harness.md` +- `references/convergence-phases.md` +- `references/rollback-and-safety.md` +- `../zach-stack` for target conventions. diff --git a/skills/.custom/agentify-repo/agents/openai.yaml b/skills/.custom/agentify-repo/agents/openai.yaml new file mode 100644 index 00000000..c76ca4e4 --- /dev/null +++ b/skills/.custom/agentify-repo/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Agentify Repo" + short_description: "Incrementally harden existing repos with zach-stack conventions." + default_prompt: "Use $agentify-repo to add agent-friendly tooling in safe milestones." diff --git a/skills/.custom/agentify-repo/references/convergence-phases.md b/skills/.custom/agentify-repo/references/convergence-phases.md new file mode 100644 index 00000000..78a4ae3f --- /dev/null +++ b/skills/.custom/agentify-repo/references/convergence-phases.md @@ -0,0 +1,19 @@ +# Convergence phases + +## Phase 0: Discovery + +- Detect existing stack and constraints. +- Record what cannot be changed safely in one pass. + +## Phase 1: Surface hardening + +- Docs + Justfile + lightweight test discoverability. +- Add project-specific lint/test quick checks. + +## Phase 2: Toolchain alignment + +- Add `zach-stack` defaults where they do not disrupt current architecture. 
+ +## Phase 3+: Optional deepening + +- Introduce additional stack pieces (workspaces, visualization layout, docs automation, etc.) as explicit follow-up passes. diff --git a/skills/.custom/agentify-repo/references/incremental-harness.md b/skills/.custom/agentify-repo/references/incremental-harness.md new file mode 100644 index 00000000..1ec9b7c1 --- /dev/null +++ b/skills/.custom/agentify-repo/references/incremental-harness.md @@ -0,0 +1,16 @@ +# Incremental harness setup + +## Purpose + +Build the operating surface needed for agents before enforcing deeper stack shifts. + +## Initial changes (safe first) + +- Add/normalize `docs/` with a current file-map and runbook. +- Add/refresh `Justfile` command palette. +- Introduce a lightweight test index and smoke test list. +- Clarify command/tool ownership in docs. + +## Principle + +The harness layer should reduce uncertainty first: if agents can discover and run stable commands, deeper changes are cheaper and safer. diff --git a/skills/.custom/agentify-repo/references/rollback-and-safety.md b/skills/.custom/agentify-repo/references/rollback-and-safety.md new file mode 100644 index 00000000..ce23b55b --- /dev/null +++ b/skills/.custom/agentify-repo/references/rollback-and-safety.md @@ -0,0 +1,20 @@ +# Rollback and safety + +## Why it matters + +Incremental changes are useful only if each step is reversible. + +## Safety pattern + +- Make one conceptual category of change at a time. +- Validate commands and tests after each change set. 
+- Record a simple checkpoint note: + - What changed + - Why it changed + - How to revert + +## Example rollback triggers + +- Unexpected behavioral diff +- Test coverage mismatch introduced by command changes +- Documentation or automation drift diff --git a/skills/.custom/configure-codex/SKILL.md b/skills/.custom/configure-codex/SKILL.md new file mode 100644 index 00000000..ec80204b --- /dev/null +++ b/skills/.custom/configure-codex/SKILL.md @@ -0,0 +1,38 @@ +--- +name: configure-codex +description: "Conservatively configure `.codex/rules/default.rules` and Codex environment setup for repo and user ergonomics." +--- + +# Configure Codex + +Use this skill when asked to update Codex rules, command permissions, or environment setup for repository work. + +## Scope +- `.codex/rules/default.rules` updates. +- Command allow-list selection. +- Codex environment/worktree ergonomics. +- Guidance for repo-scoped vs user-scoped settings. + +## Guarded workflow + +1. Inventory current constraints and user intent. +2. Propose only relevant allowed command families (e.g., uv, just, pnpm, gh, glab). +3. Ask for confirmation before writing rule updates. +4. Apply minimally and explain impact. +5. Validate the new command surface and leave a follow-up note. + +## Conservative defaults + +- Keep allow-lists minimal. +- Prefer repo-only permissions over broad global overrides. +- Add no commands that are not used by the project. + +## Troubleshooting + +- If `.codex/rules` is not writable in the current workspace, update the user-level `/Users/zach.parent/.codex/rules/default.rules` after confirming scope and keep that in notes. 
+ +## References + +- `references/rules-default.md` +- `references/allowed-commands-matrix.md` +- `references/codex-environment.md` diff --git a/skills/.custom/configure-codex/agents/openai.yaml b/skills/.custom/configure-codex/agents/openai.yaml new file mode 100644 index 00000000..ffaf935d --- /dev/null +++ b/skills/.custom/configure-codex/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Configure Codex" + short_description: "Set up codex rules and environment configuration safely." + default_prompt: "Use $configure-codex to propose and apply safe Codex configuration updates." diff --git a/skills/.custom/configure-codex/references/allowed-commands-matrix.md b/skills/.custom/configure-codex/references/allowed-commands-matrix.md new file mode 100644 index 00000000..a05fde93 --- /dev/null +++ b/skills/.custom/configure-codex/references/allowed-commands-matrix.md @@ -0,0 +1,17 @@ +# Allowed command matrix (`configure-codex`) + +## Always avoid by default + +- Blanket escalation of broad shell commands. +- Universal global allow-lists without project need. + +## Recommended matrix by project type + +- **Web-first**: `pnpm`, `just`, `gh`, optional `uv`. +- **Python-first**: `uv`, `just`, `gh`, optional `glab`. +- **Mixed**: web + python commands only where actively used. + +## Confirmation requirements + +- Ask before enabling commands not currently used by the repo. +- Keep user-visible logs of every newly enabled command. diff --git a/skills/.custom/configure-codex/references/codex-environment.md b/skills/.custom/configure-codex/references/codex-environment.md new file mode 100644 index 00000000..418065e9 --- /dev/null +++ b/skills/.custom/configure-codex/references/codex-environment.md @@ -0,0 +1,19 @@ +# Codex environment and worktree setup + +## Objective + +Make agent work predictable and repeatable in local and customer environments. 
+ +## Setup areas + +- command presets for project startup +- worktree naming and path strategy +- reusable command sequences via `Justfile` +- lightweight onboarding docs for Codex behavior + +## Recommendations + +- Keep environment setup documented in `docs/`. +- Separate repo updates from user-profile updates. +- Validate environment commands after each phase. +- Use conservative defaults and escalate only as needed. diff --git a/skills/.custom/configure-codex/references/rules-default.md b/skills/.custom/configure-codex/references/rules-default.md new file mode 100644 index 00000000..ffdcd324 --- /dev/null +++ b/skills/.custom/configure-codex/references/rules-default.md @@ -0,0 +1,47 @@ +# `.codex/rules/default.rules` guidance + +## Principles + +- Keep permissions scoped to the project workflow. +- Prefer minimal, explicit commands. +- Ask before writing any potentially sensitive settings. + +## Typical command families + +- `uv`: Python install/test/runtime commands +- `just`: repeated repo task orchestration +- `pnpm`: node tooling for web apps +- `gh`: GitHub workflows and issue/pr management +- `glab`: GitLab/GLab workflow parity where used + +## Process + +- Propose a diff, confirm with user, then apply. +- Re-check the rule set after each commit boundary. +## Example `prefix_rule` syntax + +```python +prefix_rule( + pattern = ["uv", "sync"], + decision = "allow", + justification = "Allow project dependency sync outside sandbox", + match = [ + "uv sync", + "uv sync --locked", + ], + not_match = ["uv run sync"], +) +``` + +Use `match` and `not_match` as inline rule tests so malformed patterns fail fast when the rule file is loaded. + +## Validate the rules file + +Run a rule check after updating `default.rules`: + +```bash +codex execpolicy check --pretty --rules .codex/rules/default.rules -- uv sync +``` + +Use the same command for any command you changed in the allow list. 
+ diff --git a/skills/.custom/init-repo/SKILL.md b/skills/.custom/init-repo/SKILL.md new file mode 100644 index 00000000..612d19e1 --- /dev/null +++ b/skills/.custom/init-repo/SKILL.md @@ -0,0 +1,40 @@ +--- +name: init-repo +description: "Initialize a new repo with a fast, testable, documented, agent-friendly setup using zach-stack defaults." +--- + +# Init Repo + +Use this skill when a user asks to start a new project or bootstrap an empty repository. + +## Workflow + +If required dependencies are missing in this environment, bootstrap them first: + +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/zach-stack` +- `$install-skills https://github.com/openai/skills/tree/main/skills/.custom/configure-codex` + +1. Capture constraints and ask clarifying questions only if ambiguous. +2. Choose a stack profile: web, python, visualization, or mixed. +3. Create a phase-1 scaffold that is minimal but complete: + - `docs/` with setup and file map starter. + - `Justfile` with repeatable agent commands. + - Testing skeleton (unit + at least one integration check). + - Linting/pre-commit baseline for selected stack. + - Optional `.codex/rules/default.rules` proposal via `configure-codex`. +4. Add technology-specific defaults from `zach-stack`: + - web defaults (framework-light vs lightweight React) + - Python defaults (`uv`) + - workspace structure if mixed. +5. Define the first milestone commit boundary. + +## Guardrails + +- Keep `init-repo` incremental: prefer Phase 1 completion over full build-out. +- Default to minimal files and explain what is deferred. +- Never write sensitive agent/environment rules without explicit user confirmation. + +## References + +- Use `references/new-repo-runbook.md` for scaffold templates and command defaults. +- Use `../zach-stack` for stack-specific decisions. 
diff --git a/skills/.custom/init-repo/agents/openai.yaml b/skills/.custom/init-repo/agents/openai.yaml new file mode 100644 index 00000000..3aa15dd0 --- /dev/null +++ b/skills/.custom/init-repo/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Init Repo" + short_description: "Bootstrap a new repo with testable and agent-friendly defaults." + default_prompt: "Use $init-repo to scaffold a new repository with zach-stack conventions." diff --git a/skills/.custom/init-repo/references/new-repo-runbook.md b/skills/.custom/init-repo/references/new-repo-runbook.md new file mode 100644 index 00000000..d89c68c0 --- /dev/null +++ b/skills/.custom/init-repo/references/new-repo-runbook.md @@ -0,0 +1,21 @@ +# New repo runbook + +## Phase 1 scaffold (minimum) + +- Create `docs/` and add a compact onboarding file. +- Add `Justfile` with build/test/lint commands. +- Add dependency/tool defaults for selected stack (`pnpm` for web, `uv` for Python). +- Add testing skeleton for targeted unit and integration coverage. +- Add workspace/docs conventions in notes for future phases. + +## Recommended first tasks + +- Choose stack from `zach-stack` profiles. +- Add contributor-level checklist for tests and docs updates. +- Add a clean rollback point before changing environment/config files. + +## Suggested outputs by stack + +- Web: Tailwind baseline + lightweight project setup + test/check commands. +- Python: `pyproject.toml`/`uv` convention + basic test command. +- Mixed: two-phase plan with clear module boundaries and shared-data workspace notes. diff --git a/skills/.custom/zach-stack/SKILL.md b/skills/.custom/zach-stack/SKILL.md new file mode 100644 index 00000000..ca1398ab --- /dev/null +++ b/skills/.custom/zach-stack/SKILL.md @@ -0,0 +1,58 @@ +--- +name: zach-stack +description: "Define opinionated conventions for testable, well-documented, agent-friendly repos. Use for stack selection and application defaults." 
+--- + +# Zach-Stack + +Use this skill when asked to define or apply a preferred stack for a new/existing project. + +## Purpose + +`zach-stack` is the compact decision source for: +- project scaffolding defaults +- tooling conventions +- test strategy expectations +- docs/lifecycle defaults +- Codex-friendly repository structure + +## When to use + +- before or during `init-repo` +- during `agentify-repo` as the target conventions +- when a team member asks for your preferred stack defaults + +## Core rules (high signal) + +- **Minimal web by default**: prefer plain HTML/CSS/JS when requirements are simple. +- **Dynamic web**: choose a lightweight React setup when needed for stateful interactions. +- **Complex static sites**: prefer Eleventy. +- **CSS**: default to Tailwind. +- **Python work**: use `uv` for package management and virtual environments. +- **Workspaces**: prefer workspace-based organization for multi-module projects. +- **CLI stack**: prefer **Typer** for new CLIs; use **Click** when existing codebases already use it. +- **Testing**: add targeted unit tests and at least one integration/acceptance layer. +- **Frontend checks**: include automated checks that cover real user workflows. +- **Playwright path**: use Playwright CLI for exploration and flow debugging; use Playwright-driven E2E where applicable. +- **Visualization**: prefer Streamlit + Plotly for analytics UI. +- **Project shape for shared data**: use a dedicated workspace/module for data, plus workspace boundaries for Streamlit/compute when needed. +- **Standalone scripts**: keep CLIs standalone with minimal dependencies and explicit script-level dependency boundaries; keep CLIs minimally scoped. +- **Docs as source of truth**: every project must have `docs/` and keep it current with code changes. +- **Automation**: include `Justfile` in most repos for repeatable agent tasks. +- **Pre-commit**: always define pre-commit via project-native tooling (`pnpm` or `uv`). + +## Process + +1. 
Ask for project intent: web app, python service, data app, or mixed. +2. Select a minimal stack from these preferences. +3. Confirm constraints that override defaults (security, infra, legacy platform). +4. Apply only what is relevant for the project phase. + +## References + +- `references/web.md` for web defaults, CSS, and Playwright use. +- `references/python.md` for UV, workspace, and Python conventions. +- `references/testing.md` for test structure expectations. +- `references/docs.md` for living docs patterns. +- `references/workspaces.md` for shared workspaces in mixed stacks. +- `references/resources.md` for external source references. diff --git a/skills/.custom/zach-stack/agents/openai.yaml b/skills/.custom/zach-stack/agents/openai.yaml new file mode 100644 index 00000000..12336df6 --- /dev/null +++ b/skills/.custom/zach-stack/agents/openai.yaml @@ -0,0 +1,4 @@ +interface: + display_name: "Zach Stack" + short_description: "Opinionated stack defaults for agent-friendly repos." + default_prompt: "Use $zach-stack to define technology and workflow defaults for the repo." diff --git a/skills/.custom/zach-stack/references/docs.md b/skills/.custom/zach-stack/references/docs.md new file mode 100644 index 00000000..4479841d --- /dev/null +++ b/skills/.custom/zach-stack/references/docs.md @@ -0,0 +1,19 @@ +# Documentation conventions (`zach-stack`) + +## Minimum + +- Every repo must include `docs/`. +- Docs should include a file index/structure map and quick update points. +- Keep docs updated as part of meaningful code changes. + +## Practical expectations + +- Maintain short-lived markdown docs that stay current with code. +- Include architecture and setup notes in `docs/` for onboarding. +- Keep one canonical source-of-truth for setup commands and developer workflows. +- Add a short, versioned list of files and responsibility in `docs/file-map.md` (or equivalent). + +## Docs quality and checks + +- Use docs linting where practical to catch stale references. 
+- Treat docs quality as part of completion criteria before considering a change done. diff --git a/skills/.custom/zach-stack/references/python.md b/skills/.custom/zach-stack/references/python.md new file mode 100644 index 00000000..3807a99e --- /dev/null +++ b/skills/.custom/zach-stack/references/python.md @@ -0,0 +1,33 @@ +# Python conventions (`zach-stack`) + +## Scope +Use for Python-heavy or mixed repos with Python services/modules. + +## Tooling defaults + +- Use `uv` for dependency/developer workflow. +- Prefer workspace separation when there are multiple independent Python modules. +- Use a dedicated `workspace` structure when teams need clean boundaries for CLI, service, library, and jobs. + +## CLI conventions + +- Prefer **Typer** for new CLI development. +- Accept **Click** when existing codepaths or existing teams standardize on Click. +- Keep CLI entrypoints near interfaces and minimize coupling to service internals. +- Make scripts standalone where practical: + - put CLI dependency declarations in the nearest packaging boundary + - avoid importing large module stacks at module import time + - keep defaults explicit to reduce runtime surprises + +## App architecture recommendations + +- If data is consumed by both core services and visualization: + - create a separate data workspace/project for shared contracts and loaders + - create separate consumer workspaces (for example API layer, Streamlit app) +- For data visualization: use Streamlit + Plotly. +- If the project is small and one-purpose, keep one workspace and add explicit module boundaries. + +## Maintenance and tests + +- Keep unit tests near implementation modules. +- Add integration tests for external boundaries (HTTP handlers, file contracts, job inputs). 
diff --git a/skills/.custom/zach-stack/references/resources.md b/skills/.custom/zach-stack/references/resources.md new file mode 100644 index 00000000..bbd5eacb --- /dev/null +++ b/skills/.custom/zach-stack/references/resources.md @@ -0,0 +1,13 @@ +# External references for `zach-stack` + +- [Agent Skills homepage](https://agentskills.io/home) +- [Agent Skills specification](https://agentskills.io/specification) +- [What are Skills?](https://agentskills.io/what-are-skills) +- [Harness engineering at OpenAI](https://openai.com/index/harness-engineering/) +- [Testing Agent Skills with Evals](https://developers.openai.com/blog/eval-skills/) + +## Why these references are here + +- Keep SKILL files concise and point to standards-based context when needed. +- Encourage incremental, low-friction automation. +- Reinforce evaluation and quality checks for repeatable agent behavior. diff --git a/skills/.custom/zach-stack/references/testing.md b/skills/.custom/zach-stack/references/testing.md new file mode 100644 index 00000000..e716a44b --- /dev/null +++ b/skills/.custom/zach-stack/references/testing.md @@ -0,0 +1,45 @@ +# Testing conventions (`zach-stack`) + +## CLI strategy defaults + +- **CLI frameworks**: prefer `Typer` for new CLI implementations. +- **CLI alternatives**: `Click` is acceptable for existing or click-first codebases and when explicit subcommand ergonomics are required. +- **Dependency hygiene**: keep CLI modules lightweight; make scripts as self-contained and dependency-bounded as possible by: + - placing entrypoints in dedicated files/modules + - scoping dependencies to the package/script boundary + - avoiding monolithic "utility" scripts with broad transitive imports + +## Test expectations by framework + +- Add focused unit tests for parsing, argument validation, and command routing. +- Add integration-style tests for command side effects (filesystem, subprocess calls, exit codes). 
+- For **Typer** CLIs: + - test commands with `typer.testing.CliRunner` (Typer's thin wrapper around Click's test runner) + - verify exit codes and output + - verify help text and global options +- For **Click** CLIs: + - test via `click.testing.CliRunner` + - validate success and failure paths + - include edge-case tests around argument parsing and exit codes +- For both, assert deterministic outputs where possible. + +## Frontend checks + +- For UI work, include interaction-level validation against real page behavior (manual Playwright flow check and automated checks where feasible). + +## Docs/test coupling + +- Every major behavior change should include an updated doc entry describing intent and validation command. +- Avoid relying on broad integration tests only; use focused tests for failure isolation. + +## CLI command style for single-command scripts + +- For scripts intended to have a single action, prefer exposing the behavior on the default command and avoid a dedicated `run` subcommand label. +- This keeps invocation simple for agents: `uv run scripts/<script-name>.py`. + +## Examples + +- `uv run scripts/validate-custom-skills.py` (default entrypoint) +- `uv run scripts/run-skill-evals.py` +- `uv run scripts/test-custom-skills.py` +- `uv run scripts/sync-custom-skill.py sync --dry-run` diff --git a/skills/.custom/zach-stack/references/web.md b/skills/.custom/zach-stack/references/web.md new file mode 100644 index 00000000..e7c7e814 --- /dev/null +++ b/skills/.custom/zach-stack/references/web.md @@ -0,0 +1,28 @@ +# Web conventions (`zach-stack`) + +## Scope +Use when a project includes frontend code and needs a consistent, lightweight stack. + +## Conventions + +- Use lightweight, framework-free implementations for simple pages. +- For dynamic client behavior, prefer lightweight React over heavy abstraction layers. +- Use Tailwind for styling defaults. +- For content-heavy or markdown-driven complex static sites, use Eleventy. 
+- Use `pnpm` as package manager by default for web tooling. +- Keep browser-facing code testable with at least: + - unit-level checks for pure functions/components (or equivalent) + - integration tests for user flows + - end-to-end checks for critical UI paths + +## Playwright and checks + +- For interactive UI exploration and triage, use Playwright CLI first. +- For stable e2e coverage, standardize on Playwright-based automation in CI for key flows. +- For Python-facing web UIs, include the Python Playwright stack where helpful. +- Keep selectors and test semantics resilient to refactors. + +## Decision points + +- If no interactive state and no component orchestration are needed: no React. +- If there are interactive widgets, state-driven forms, or realtime updates: use React. diff --git a/skills/.custom/zach-stack/references/workspaces.md b/skills/.custom/zach-stack/references/workspaces.md new file mode 100644 index 00000000..554254eb --- /dev/null +++ b/skills/.custom/zach-stack/references/workspaces.md @@ -0,0 +1,12 @@ +# Workspace conventions (`zach-stack`) + +## Rationale + +Use workspaces to separate concerns, reduce coupling, and support agent navigation. + +## Typical patterns + +- `workspace` for source code organization when multiple modules are expected. +- Separate project for shared data access layer if data feeds multiple components. +- Separate Streamlit app workspace for visualization and UX. +- Keep dependency graphs explicit and directional (core < data < apps). 
diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..5cb3cbe6 --- /dev/null +++ b/uv.lock @@ -0,0 +1,394 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "cfgv" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 
295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "filelock" +version = "3.24.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" }, +] + +[[package]] +name = "identify" +version = "2.6.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, +] + +[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" }, + { 
url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/dd/36/e725903416409a533d92398e88ce665476f275081d0d7d42f9c4951999e5/librt-0.8.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:039b9f2c506bd0ab0f8725aa5ba339c6f0cd19d3b514b50d134789809c24285d", size = 209991, upload-time = "2026-02-17T16:11:45.462Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/8d908a152e1875c9f8eac96c97a480df425e657cdb47854b9efaa4998889/librt-0.8.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bb54f1205a3a6ab41a6fd71dfcdcbd278670d3a90ca502a30d9da583105b6f7", size = 224476, upload-time = "2026-02-17T16:11:46.542Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b8/a22c34f2c485b8903a06f3fe3315341fe6876ef3599792344669db98fcff/librt-0.8.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:05bd41cdee35b0c59c259f870f6da532a2c5ca57db95b5f23689fcb5c9e42440", size = 217518, upload-time = "2026-02-17T16:11:47.746Z" }, + { url = "https://files.pythonhosted.org/packages/79/6f/5c6fea00357e4f82ba44f81dbfb027921f1ab10e320d4a64e1c408d035d9/librt-0.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adfab487facf03f0d0857b8710cf82d0704a309d8ffc33b03d9302b4c64e91a9", size = 225116, upload-time = "2026-02-17T16:11:49.298Z" 
}, + { url = "https://files.pythonhosted.org/packages/f2/a0/95ced4e7b1267fe1e2720a111685bcddf0e781f7e9e0ce59d751c44dcfe5/librt-0.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:153188fe98a72f206042be10a2c6026139852805215ed9539186312d50a8e972", size = 217751, upload-time = "2026-02-17T16:11:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/93/c2/0517281cb4d4101c27ab59472924e67f55e375bc46bedae94ac6dc6e1902/librt-0.8.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dd3c41254ee98604b08bd5b3af5bf0a89740d4ee0711de95b65166bf44091921", size = 218378, upload-time = "2026-02-17T16:11:51.783Z" }, + { url = "https://files.pythonhosted.org/packages/43/e8/37b3ac108e8976888e559a7b227d0ceac03c384cfd3e7a1c2ee248dbae79/librt-0.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e0d138c7ae532908cbb342162b2611dbd4d90c941cd25ab82084aaf71d2c0bd0", size = 241199, upload-time = "2026-02-17T16:11:53.561Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/35812d041c53967fedf551a39399271bbe4257e681236a2cf1a69c8e7fa1/librt-0.8.1-cp312-cp312-win32.whl", hash = "sha256:43353b943613c5d9c49a25aaffdba46f888ec354e71e3529a00cca3f04d66a7a", size = 54917, upload-time = "2026-02-17T16:11:54.758Z" }, + { url = "https://files.pythonhosted.org/packages/de/d1/fa5d5331b862b9775aaf2a100f5ef86854e5d4407f71bddf102f4421e034/librt-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff8baf1f8d3f4b6b7257fcb75a501f2a5499d0dda57645baa09d4d0d34b19444", size = 62017, upload-time = "2026-02-17T16:11:55.748Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7c/c614252f9acda59b01a66e2ddfd243ed1c7e1deab0293332dfbccf862808/librt-0.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f2ae3725904f7377e11cc37722d5d401e8b3d5851fb9273d7f4fe04f6b3d37d", size = 52441, upload-time = "2026-02-17T16:11:56.801Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3c/f614c8e4eaac7cbf2bbdf9528790b21d89e277ee20d57dc6e559c626105f/librt-0.8.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:7e6bad1cd94f6764e1e21950542f818a09316645337fd5ab9a7acc45d99a8f35", size = 66529, upload-time = "2026-02-17T16:11:57.809Z" }, + { url = "https://files.pythonhosted.org/packages/ab/96/5836544a45100ae411eda07d29e3d99448e5258b6e9c8059deb92945f5c2/librt-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cf450f498c30af55551ba4f66b9123b7185362ec8b625a773b3d39aa1a717583", size = 68669, upload-time = "2026-02-17T16:11:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/06/53/f0b992b57af6d5531bf4677d75c44f095f2366a1741fb695ee462ae04b05/librt-0.8.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eca45e982fa074090057132e30585a7e8674e9e885d402eae85633e9f449ce6c", size = 199279, upload-time = "2026-02-17T16:11:59.862Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ad/4848cc16e268d14280d8168aee4f31cea92bbd2b79ce33d3e166f2b4e4fc/librt-0.8.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c3811485fccfda840861905b8c70bba5ec094e02825598bb9d4ca3936857a04", size = 210288, upload-time = "2026-02-17T16:12:00.954Z" }, + { url = "https://files.pythonhosted.org/packages/52/05/27fdc2e95de26273d83b96742d8d3b7345f2ea2bdbd2405cc504644f2096/librt-0.8.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363", size = 224809, upload-time = "2026-02-17T16:12:02.108Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d0/78200a45ba3240cb042bc597d6f2accba9193a2c57d0356268cbbe2d0925/librt-0.8.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5212a5bd7fae98dae95710032902edcd2ec4dc994e883294f75c857b83f9aba0", size = 218075, upload-time = "2026-02-17T16:12:03.631Z" }, + { url = "https://files.pythonhosted.org/packages/af/72/a210839fa74c90474897124c064ffca07f8d4b347b6574d309686aae7ca6/librt-0.8.1-cp313-cp313-musllinux_1_2_aarch64.whl", 
hash = "sha256:e692aa2d1d604e6ca12d35e51fdc36f4cda6345e28e36374579f7ef3611b3012", size = 225486, upload-time = "2026-02-17T16:12:04.725Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c1/a03cc63722339ddbf087485f253493e2b013039f5b707e8e6016141130fa/librt-0.8.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4be2a5c926b9770c9e08e717f05737a269b9d0ebc5d2f0060f0fe3fe9ce47acb", size = 218219, upload-time = "2026-02-17T16:12:05.828Z" }, + { url = "https://files.pythonhosted.org/packages/58/f5/fff6108af0acf941c6f274a946aea0e484bd10cd2dc37610287ce49388c5/librt-0.8.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fd1a720332ea335ceb544cf0a03f81df92abd4bb887679fd1e460976b0e6214b", size = 218750, upload-time = "2026-02-17T16:12:07.09Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/5a387bfef30ec1e4b4f30562c8586566faf87e47d696768c19feb49e3646/librt-0.8.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2af9e01e0ef80d95ae3c720be101227edae5f2fe7e3dc63d8857fadfc5a1d", size = 241624, upload-time = "2026-02-17T16:12:08.43Z" }, + { url = "https://files.pythonhosted.org/packages/d4/be/24f8502db11d405232ac1162eb98069ca49c3306c1d75c6ccc61d9af8789/librt-0.8.1-cp313-cp313-win32.whl", hash = "sha256:086a32dbb71336627e78cc1d6ee305a68d038ef7d4c39aaff41ae8c9aa46e91a", size = 54969, upload-time = "2026-02-17T16:12:09.633Z" }, + { url = "https://files.pythonhosted.org/packages/5c/73/c9fdf6cb2a529c1a092ce769a12d88c8cca991194dfe641b6af12fa964d2/librt-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:e11769a1dbda4da7b00a76cfffa67aa47cfa66921d2724539eee4b9ede780b79", size = 62000, upload-time = "2026-02-17T16:12:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/68f80ca3ac4924f250cdfa6e20142a803e5e50fca96ef5148c52ee8c10ea/librt-0.8.1-cp313-cp313-win_arm64.whl", hash = "sha256:924817ab3141aca17893386ee13261f1d100d1ef410d70afe4389f2359fea4f0", size = 52495, upload-time = "2026-02-17T16:12:11.633Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = "https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = "https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { 
url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = "https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = 
"sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = "https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = "2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = "https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, 
upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = 
"2025-12-20T14:08:54.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/04/fea538adf7dbbd6d186f551d595961e564a3b6715bdf276b477460858672/platformdirs-4.9.2.tar.gz", hash = "sha256:9a33809944b9db043ad67ca0db94b14bf452cc6aeaac46a88ea55b26e2e9d291", size = 28394, upload-time = "2026-02-16T03:56:10.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl", hash = "sha256:9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd", size = 21168, upload-time = "2026-02-16T03:56:08.891Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = 
"2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = 
"2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" }, + { url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" }, + { url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" }, + { url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" }, + { url = "https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" }, + { url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" }, + { url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" }, + { url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = 
"2026-02-19T22:32:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" }, + { url = "https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" }, + { url = "https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "skills" +version = "0.0.0" +source = { virtual = "." 
} +dependencies = [ + { name = "mypy" }, + { name = "pre-commit" }, + { name = "ruff" }, + { name = "typer" }, +] + +[package.metadata] +requires-dist = [ + { name = "mypy", specifier = ">=1.10" }, + { name = "pre-commit", specifier = ">=3.7" }, + { name = "ruff", specifier = ">=0.6" }, + { name = "typer", specifier = ">=0.12" }, +] + +[[package]] +name = "typer" +version = "0.24.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.38.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d2/03/a94d404ca09a89a7301a7008467aed525d4cdeb9186d262154dd23208709/virtualenv-20.38.0.tar.gz", hash = "sha256:94f39b1abaea5185bf7ea5a46702b56f1d0c9aa2f41a6c2b8b0af4ddc74c10a7", size = 5864558, upload-time = "2026-02-19T07:48:02.385Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/394801755d4c8684b655d35c665aea7836ec68320304f62ab3c94395b442/virtualenv-20.38.0-py3-none-any.whl", hash = "sha256:d6e78e5889de3a4742df2d3d44e779366325a90cf356f15621fddace82431794", size = 5837778, upload-time = "2026-02-19T07:47:59.778Z" }, +]