name: ScottyLabs Wrapped Analysis

on:
  push:
    branches:
      - 'wrapped-analysis-*'

jobs:
  analyze:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Get full history for git log analysis

      - name: Install cargo-binstall
        uses: cargo-bins/cargo-binstall@main

      - name: Install tokei
        run: cargo binstall --no-confirm tokei

      - name: Collect commit statistics
        run: |
          # Extract year from branch name (wrapped-analysis-2025 -> 2025)
          year=$(echo "${{ github.ref_name }}" | sed 's/wrapped-analysis-//')

          # Get commits for the specified year (explicit end time keeps Dec 31 commits)
          git log --all --since="${year}-01-01" --until="${year}-12-31T23:59:59" \
            --pretty=format:'{"sha":"%H","author_name":"%an","author_email":"%ae","timestamp":"%aI","message":"%s"}' \
            --numstat | \
          python3 -c '
          import sys
          import json

          commits = []
          current_commit = None

          for line in sys.stdin:
              line = line.rstrip()
              if not line:
                  continue

              if line.startswith("{"):
                  # New commit header
                  if current_commit and current_commit.get("files_changed"):
                      commits.append(current_commit)
                  try:
                      current_commit = json.loads(line)
                  except json.JSONDecodeError:
                      # Subjects with quotes or backslashes break the naive JSON header; skip them
                      current_commit = None
                      continue
                  current_commit["files_changed"] = []
                  current_commit["additions"] = 0
                  current_commit["deletions"] = 0
              elif current_commit is not None:
                  # File change stats (additions, deletions, filename)
                  parts = line.split("\t")
                  if len(parts) == 3:
                      additions, deletions, filename = parts
                      try:
                          adds = int(additions) if additions != "-" else 0
                          dels = int(deletions) if deletions != "-" else 0
                      except ValueError:
                          adds, dels = 0, 0

                      current_commit["additions"] += adds
                      current_commit["deletions"] += dels
                      current_commit["files_changed"].append({
                          "filename": filename,
                          "additions": adds,
                          "deletions": dels
                      })

          # Add the last commit
          if current_commit and current_commit.get("files_changed"):
              commits.append(current_commit)

          print(json.dumps(commits, indent=2))
          ' > commits.json

          echo "Collected $(jq length commits.json) commits"

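      # For reference only: each commits.json entry produced above has roughly this shape
      # (placeholder values, not real data; every key comes from the script above):
      #   {"sha": "<hex>", "author_name": "...", "author_email": "...",
      #    "timestamp": "2025-03-01T12:00:00-05:00", "message": "...",
      #    "additions": 10, "deletions": 2,
      #    "files_changed": [{"filename": "path/to/file", "additions": 10, "deletions": 2}]}
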
      - name: Collect language statistics
        run: |
          tokei --output json > languages.json
          echo "Analyzed $(jq 'keys | length' languages.json) languages"

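      # Illustrative only: tokei maps language names to per-language totals, so a later
      # consumer could rank languages with something like the query below (assumes the
      # "code" field exposed by tokei 12.x JSON output):
      #   jq 'to_entries | map({language: .key, code: .value.code}) | sort_by(-.code)' languages.json
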
      - name: Collect PR statistics
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Extract year from branch name
          year=$(echo "${{ github.ref_name }}" | sed 's/wrapped-analysis-//')

          # Fetch all PRs created in the specified year
          gh pr list --state all --limit 1000 \
            --json number,author,state,createdAt,mergedAt,additions,deletions,reviews \
            | jq --arg year "$year" '
              map(select(.createdAt | startswith($year)))
            ' > prs.json

          echo "Collected $(jq length prs.json) PRs"

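      # Sketch of a possible downstream query (not executed here): merged PRs per author,
      # using only the fields requested above; gh exposes the PR author as .author.login.
      #   jq '[.[] | select(.mergedAt != null) | .author.login] | group_by(.) | map({author: .[0], merged: length})' prs.json
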
      - name: Collect issue statistics
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Extract year from branch name
          year=$(echo "${{ github.ref_name }}" | sed 's/wrapped-analysis-//')

          # Fetch all issues created in the specified year
          gh issue list --state all --limit 1000 \
            --json number,author,state,createdAt,closedAt,comments \
            | jq --arg year "$year" '
              map(select(.createdAt | startswith($year)))
            ' > issues.json

          # Fetch commenters for each issue
          jq -r '.[].number' issues.json | while read -r issue_num; do
            gh api "/repos/${{ github.repository }}/issues/${issue_num}/comments" \
              --jq '[.[].user.login] | unique' > "issue_${issue_num}_commenters.json" 2>/dev/null || echo '[]' > "issue_${issue_num}_commenters.json"
          done

          # Merge commenters into issues.json
          python3 << 'PYTHON_EOF'
          import json
          with open("issues.json") as f:
              issues = json.load(f)
          for issue in issues:
              commenter_file = f"issue_{issue['number']}_commenters.json"
              try:
                  with open(commenter_file) as f:
                      issue["commenters"] = json.load(f)
              except FileNotFoundError:
                  issue["commenters"] = []
          with open("issues_with_commenters.json", "w") as f:
              json.dump(issues, f, indent=2)
          PYTHON_EOF

          mv issues_with_commenters.json issues.json
          rm -f issue_*_commenters.json

          echo "Collected $(jq length issues.json) issues"

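      # Sketch (not executed): with the "commenters" array added above, the most-discussed
      # issues of the year could be ranked with:
      #   jq 'sort_by(.commenters | length) | reverse | .[0:5] | map({number, commenters})' issues.json
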
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: stats-${{ github.event.repository.name }}
          path: |
            commits.json
            languages.json
            prs.json
            issues.json
          retention-days: 7
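      # The four JSON files ship as one artifact; a separate aggregation job (not part of
      # this workflow) could retrieve them with actions/download-artifact@v4 using the
      # same "stats-<repository>" name.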