Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 151 additions & 0 deletions .github/workflows/clone-metrics.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
---
# Collects GitHub repository clone statistics once a day and commits them as
# CSV files on a dedicated `metrics` branch (daily breakdown + 14-day totals).
name: Track Clone Metrics

on:
  workflow_dispatch:
  schedule:
    - cron: '0 8 * * *'  # run every day at 08:00 UTC

jobs:
  clone-stats:
    runs-on: ubuntu-latest
    permissions:
      contents: write  # allows the checkout-persisted token to push commits

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history so the metrics branch can be checked out

      # The traffic API requires push access; the default GITHUB_TOKEN cannot
      # read /traffic endpoints, so a GitHub App token is minted for the fetch.
      - name: Generate GitHub App token
        id: generate_token
        uses: tibdex/github-app-token@v2.1.0
        with:
          app_id: ${{ secrets.APP_ID }}
          private_key: ${{ secrets.APP_PRIVATE_KEY }}

      - name: Switch to metrics branch
        run: |
          # Check out the existing metrics branch, or create it if absent.
          if git show-ref --verify --quiet refs/remotes/origin/metrics; then
            echo "📋 Checking out existing metrics branch..."
            git checkout -b metrics origin/metrics || git checkout metrics
          else
            echo "🆕 Creating new metrics branch..."
            git checkout -b metrics
          fi

      - name: Fetch clone data
        env:
          TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          mkdir -p .metrics
          # Response contains both the daily breakdown and the 14-day totals.
          curl -s -H "Accept: application/vnd.github+json" \
            -H "Authorization: Bearer $TOKEN" \
            https://api.github.com/repos/${{ github.repository }}/traffic/clones \
            > .metrics/clone_stats.json

          echo "Clone metrics:"
          cat .metrics/clone_stats.json

      - name: Update daily metrics
        run: |
          # Upsert one CSV row per day from the API's `clones` array.
          LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC")

          # Create daily CSV with header if it doesn't exist.
          if [ ! -f .metrics/daily_clone_metrics.csv ]; then
            echo "date,total_clones,unique_cloners,last_updated" > .metrics/daily_clone_metrics.csv
          fi

          echo "📊 Processing daily metrics..."
          jq -r '.clones[] | "\(.timestamp | split("T")[0]),\(.count),\(.uniques)"' .metrics/clone_stats.json | while IFS=',' read -r day_date count uniques; do
            echo "Processing $day_date: $count clones, $uniques unique"

            # Anchor on "date," so a date can never prefix-match another row.
            if grep -q "^$day_date," .metrics/daily_clone_metrics.csv; then
              echo "📝 Updating existing entry for $day_date..."
              # Rewrite the matching row in place; all other rows pass through.
              awk -v date="$day_date" -v count="$count" -v uniques="$uniques" -v last_updated="$LAST_UPDATED" '
                BEGIN { FS=","; OFS="," }
                /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date {
                  print $1, count, uniques, last_updated;
                  next
                }
                { print }
              ' .metrics/daily_clone_metrics.csv > .metrics/daily_clone_metrics_temp.csv
              mv .metrics/daily_clone_metrics_temp.csv .metrics/daily_clone_metrics.csv
            else
              echo "➕ Adding new daily entry for $day_date..."
              echo "$day_date,$count,$uniques,$LAST_UPDATED" >> .metrics/daily_clone_metrics.csv
            fi
          done

          echo "Daily metrics:"
          tail -n 5 .metrics/daily_clone_metrics.csv

      - name: Update 14-day rolling metrics
        run: |
          # Upsert today's snapshot of the API's 14-day totals.
          COUNT_14D=$(jq '.count' .metrics/clone_stats.json)
          UNIQUES_14D=$(jq '.uniques' .metrics/clone_stats.json)
          DATE_ONLY=$(date -u +"%Y-%m-%d")
          LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC")

          echo "📊 Processing 14-day metrics... for date: $DATE_ONLY"
          echo "Processing values: $COUNT_14D clones, $UNIQUES_14D unique"

          # Create 14-day CSV with header if it doesn't exist.
          if [ ! -f .metrics/rolling_14d_clone_metrics.csv ]; then
            echo "date,total_clones_14d,unique_cloners_14d,last_updated" > .metrics/rolling_14d_clone_metrics.csv
            echo "📄 Created new 14-day rolling CSV file"
          fi

          # Anchor on "date," so a date can never prefix-match another row.
          if grep -q "^$DATE_ONLY," .metrics/rolling_14d_clone_metrics.csv; then
            echo "📝 Updating existing 14-day rolling entry for $DATE_ONLY..."
            # Rewrite the matching row; END appends if no row matched (defensive).
            awk -v date="$DATE_ONLY" -v count="$COUNT_14D" -v uniques="$UNIQUES_14D" -v last_updated="$LAST_UPDATED" '
              BEGIN { FS=","; OFS=","; updated=0 }
              /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date {
                print $1, count, uniques, last_updated;
                updated=1;
                next
              }
              { print }
              END { if (!updated) print date, count, uniques, last_updated }
            ' .metrics/rolling_14d_clone_metrics.csv > .metrics/rolling_14d_clone_metrics_temp.csv
            mv .metrics/rolling_14d_clone_metrics_temp.csv .metrics/rolling_14d_clone_metrics.csv
            echo "✅ Updated existing entry"
          else
            echo "➕ Adding new 14-day rolling entry for $DATE_ONLY..."
            echo "$DATE_ONLY,$COUNT_14D,$UNIQUES_14D,$LAST_UPDATED" >> .metrics/rolling_14d_clone_metrics.csv
            echo "✅ Added new entry"
          fi

          echo "14-day rolling metrics:"
          tail -n 5 .metrics/rolling_14d_clone_metrics.csv

      # NOTE(review): the push uses the credentials persisted by actions/checkout
      # (default GITHUB_TOKEN, granted contents: write above); the App token is
      # not needed here, so no token env is set on this step.
      - name: Commit and push results
        run: |
          git config user.name "CloneMetricsBot[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"

          # Stage both CSV files.
          git add .metrics/daily_clone_metrics.csv .metrics/rolling_14d_clone_metrics.csv

          # Only commit/push when the staged data actually changed.
          if git diff --staged --quiet; then
            echo "ℹ️ No changes to commit - CSV data is up to date"
          else
            echo "📝 Committing changes..."
            git commit -m "Automated update: repository clone metrics $(date)"

            echo "🚀 Pushing to metrics branch..."
            git push --force-with-lease origin metrics
          fi
27 changes: 27 additions & 0 deletions .github/workflows/ruff.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
---
# Lints and format-checks the codebase with Ruff on PRs and pushes to the
# main development branches. Fails the job on any lint or formatting issue.
name: Ruff Lint & Format

on:
  pull_request:
  push:
    branches: [develop, main]

jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"  # quoted so 3.10-style versions never parse as floats

      - name: Install Ruff
        run: pip install ruff

      - name: Run Ruff check (lint)
        run: ruff check .

      # --check verifies formatting without modifying files; non-zero exit on drift.
      - name: Run Ruff format (verify formatting)
        run: ruff format --check .
151 changes: 151 additions & 0 deletions .github/workflows/view-metrics.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
---
# Collects GitHub repository view statistics and commits them as CSV files on
# the `metrics` branch. Chained after the clone-metrics workflow so the two
# jobs never push to the metrics branch concurrently.
name: Track View Metrics

on:
  workflow_dispatch:
  workflow_run:
    workflows: ["Track Clone Metrics"]  # must match that workflow's `name:` exactly
    # NOTE(review): `completed` fires on success AND failure of the upstream
    # run; add a conclusion check on the job if failure runs are unwanted.
    types: [completed]

jobs:
  view-stats:
    runs-on: ubuntu-latest
    permissions:
      contents: write  # allows the checkout-persisted token to push commits

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history so the metrics branch can be checked out

      # The traffic API requires push access; the default GITHUB_TOKEN cannot
      # read /traffic endpoints, so a GitHub App token is minted for the fetch.
      - name: Generate GitHub App token
        id: generate_token
        uses: tibdex/github-app-token@v2.1.0
        with:
          app_id: ${{ secrets.APP_ID }}
          private_key: ${{ secrets.APP_PRIVATE_KEY }}

      - name: Switch to metrics branch
        run: |
          # Check out the existing metrics branch, or create it if absent.
          if git show-ref --verify --quiet refs/remotes/origin/metrics; then
            echo "📋 Checking out existing metrics branch..."
            git checkout -b metrics origin/metrics || git checkout metrics
          else
            echo "🆕 Creating new metrics branch..."
            git checkout -b metrics
          fi

      - name: Fetch view data
        env:
          TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          mkdir -p .metrics
          # Response contains both the daily breakdown and the 14-day totals.
          curl -s -H "Accept: application/vnd.github+json" \
            -H "Authorization: Bearer $TOKEN" \
            https://api.github.com/repos/${{ github.repository }}/traffic/views \
            > .metrics/view_stats.json

          echo "View metrics:"
          cat .metrics/view_stats.json

      - name: Update daily metrics
        run: |
          # Upsert one CSV row per day from the API's `views` array.
          LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC")

          # Create daily CSV with header if it doesn't exist.
          if [ ! -f .metrics/daily_view_metrics.csv ]; then
            echo "date,total_views,unique_visitors,last_updated" > .metrics/daily_view_metrics.csv
          fi

          echo "📊 Processing daily metrics..."
          jq -r '.views[] | "\(.timestamp | split("T")[0]),\(.count),\(.uniques)"' .metrics/view_stats.json | while IFS=',' read -r day_date count uniques; do
            echo "Processing $day_date: $count views, $uniques unique"

            # Anchor on "date," so a date can never prefix-match another row.
            if grep -q "^$day_date," .metrics/daily_view_metrics.csv; then
              echo "📝 Updating existing entry for $day_date..."
              # Rewrite the matching row in place; all other rows pass through.
              awk -v date="$day_date" -v count="$count" -v uniques="$uniques" -v last_updated="$LAST_UPDATED" '
                BEGIN { FS=","; OFS="," }
                /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date {
                  print $1, count, uniques, last_updated;
                  next
                }
                { print }
              ' .metrics/daily_view_metrics.csv > .metrics/daily_view_metrics_temp.csv
              mv .metrics/daily_view_metrics_temp.csv .metrics/daily_view_metrics.csv
            else
              echo "➕ Adding new daily entry for $day_date..."
              echo "$day_date,$count,$uniques,$LAST_UPDATED" >> .metrics/daily_view_metrics.csv
            fi
          done

          echo "Daily metrics:"
          tail -n 5 .metrics/daily_view_metrics.csv

      - name: Update 14-day rolling metrics
        run: |
          # Upsert today's snapshot of the API's 14-day totals.
          COUNT_14D=$(jq '.count' .metrics/view_stats.json)
          UNIQUES_14D=$(jq '.uniques' .metrics/view_stats.json)
          DATE_ONLY=$(date -u +"%Y-%m-%d")
          LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC")

          echo "📊 Processing 14-day metrics... for date: $DATE_ONLY"
          echo "Processing values: $COUNT_14D views, $UNIQUES_14D unique"

          # Create 14-day CSV with header if it doesn't exist.
          if [ ! -f .metrics/rolling_14d_view_metrics.csv ]; then
            echo "date,total_views_14d,unique_visitors_14d,last_updated" > .metrics/rolling_14d_view_metrics.csv
            echo "📄 Created new 14-day rolling CSV file"
          fi

          # Anchor on "date," so a date can never prefix-match another row.
          if grep -q "^$DATE_ONLY," .metrics/rolling_14d_view_metrics.csv; then
            echo "📝 Updating existing 14-day rolling entry for $DATE_ONLY..."
            # Rewrite the matching row; END appends if no row matched (defensive).
            awk -v date="$DATE_ONLY" -v count="$COUNT_14D" -v uniques="$UNIQUES_14D" -v last_updated="$LAST_UPDATED" '
              BEGIN { FS=","; OFS=","; updated=0 }
              /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date {
                print $1, count, uniques, last_updated;
                updated=1;
                next
              }
              { print }
              END { if (!updated) print date, count, uniques, last_updated }
            ' .metrics/rolling_14d_view_metrics.csv > .metrics/rolling_14d_view_metrics_temp.csv
            mv .metrics/rolling_14d_view_metrics_temp.csv .metrics/rolling_14d_view_metrics.csv
            echo "✅ Updated existing entry"
          else
            echo "➕ Adding new 14-day rolling entry for $DATE_ONLY..."
            echo "$DATE_ONLY,$COUNT_14D,$UNIQUES_14D,$LAST_UPDATED" >> .metrics/rolling_14d_view_metrics.csv
            echo "✅ Added new entry"
          fi

          echo "14-day rolling metrics:"
          tail -n 5 .metrics/rolling_14d_view_metrics.csv

      # NOTE(review): the push uses the credentials persisted by actions/checkout
      # (default GITHUB_TOKEN, granted contents: write above); the App token is
      # not needed here, so no token env is set on this step.
      - name: Commit and push results
        run: |
          git config user.name "ViewMetricsBot[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"

          # Stage both CSV files.
          git add .metrics/daily_view_metrics.csv .metrics/rolling_14d_view_metrics.csv

          # Only commit/push when the staged data actually changed.
          if git diff --staged --quiet; then
            echo "ℹ️ No changes to commit - CSV data is up to date"
          else
            echo "📝 Committing changes..."
            git commit -m "Automated update: repository view metrics $(date)"

            echo "🚀 Pushing to metrics branch..."
            git push --force-with-lease origin metrics
          fi
Loading