# ci: add ScottyLabs Wrapped analysis workflow (PR #1)
---
name: ScottyLabs Wrapped Analysis

# Runs when a branch named wrapped-analysis-<year> is pushed
# (e.g. wrapped-analysis-2025); the year is parsed from the branch name.
on:
  push:
    branches:
      - 'wrapped-analysis-*'

jobs:
  analyze:
    runs-on: ubuntu-latest
    env:
      # Expose the ref name once as an env var instead of interpolating
      # ${{ github.ref_name }} directly into shell scripts — branch names
      # are user-controlled, and inline ${{ }} expansion in `run:` is a
      # script-injection vector.
      BRANCH_NAME: ${{ github.ref_name }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Get full history for git log analysis

      - name: Install cargo-binstall
        uses: cargo-bins/cargo-binstall@main

      - name: Install tokei
        run: cargo binstall --no-confirm tokei

      - name: Collect commit statistics
        run: |
          # Extract year from branch name (wrapped-analysis-2025 -> 2025)
          year=$(echo "$BRANCH_NAME" | sed 's/wrapped-analysis-//')
          # Get commits for the specified year. Explicit start/end-of-day
          # timestamps keep Dec 31 commits in range — a bare --until date
          # cuts off at git's default time-of-day, not midnight.
          # NOTE(review): %s is spliced into JSON unescaped; a commit subject
          # containing a double quote or backslash will break json.loads below.
          # The embedded script and heredoc-style lines stay flush with the
          # run-block base indent: python3 -c rejects leading whitespace.
          git log --all --since="${year}-01-01T00:00:00" --until="${year}-12-31T23:59:59" \
            --pretty=format:'{"sha":"%H","author_name":"%an","author_email":"%ae","timestamp":"%aI","message":"%s"}' \
            --numstat | \
          python3 -c '
          import sys
          import json

          commits = []
          current_commit = None
          for line in sys.stdin:
              line = line.rstrip()
              if not line:
                  continue
              if line.startswith("{"):
                  # New commit header. Commits with no numstat lines (e.g.
                  # merges, empty commits) are dropped by this guard.
                  if current_commit and current_commit.get("files_changed"):
                      commits.append(current_commit)
                  current_commit = json.loads(line)
                  current_commit["files_changed"] = []
                  current_commit["additions"] = 0
                  current_commit["deletions"] = 0
              else:
                  # File change stats (additions, deletions, filename);
                  # "-" marks binary files, counted as 0.
                  parts = line.split("\t")
                  if len(parts) == 3:
                      additions, deletions, filename = parts
                      try:
                          adds = int(additions) if additions != "-" else 0
                          dels = int(deletions) if deletions != "-" else 0
                      except ValueError:
                          adds, dels = 0, 0
                      current_commit["additions"] += adds
                      current_commit["deletions"] += dels
                      current_commit["files_changed"].append({
                          "filename": filename,
                          "additions": adds,
                          "deletions": dels
                      })
          # Add last commit
          if current_commit and current_commit.get("files_changed"):
              commits.append(current_commit)
          print(json.dumps(commits, indent=2))
          ' > commits.json
          echo "Collected $(jq length commits.json) commits"

      - name: Collect language statistics
        run: |
          tokei --output json > languages.json
          echo "Analyzed $(jq 'keys | length' languages.json) languages"

      - name: Collect PR statistics
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Extract year from branch name
          year=$(echo "$BRANCH_NAME" | sed 's/wrapped-analysis-//')
          # Fetch all PRs created in the specified year.
          # NOTE(review): --limit 1000 caps the result set; repos with more
          # PRs in a year would be silently truncated.
          gh pr list --state all --limit 1000 \
            --json number,author,state,createdAt,mergedAt,additions,deletions,reviews \
            | jq --arg year "$year" '
              map(select(.createdAt | startswith($year)))
            ' > prs.json
          echo "Collected $(jq length prs.json) PRs"

      - name: Collect issue statistics
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Extract year from branch name
          year=$(echo "$BRANCH_NAME" | sed 's/wrapped-analysis-//')
          # Fetch all issues created in the specified year
          gh issue list --state all --limit 1000 \
            --json number,author,state,createdAt,closedAt,comments \
            | jq --arg year "$year" '
              map(select(.createdAt | startswith($year)))
            ' > issues.json
          # Fetch commenters for each issue; per-issue failures fall back to []
          jq -r '.[].number' issues.json | while read -r issue_num; do
            gh api "/repos/${{ github.repository }}/issues/${issue_num}/comments" \
              --jq '[.[].user.login] | unique' > "issue_${issue_num}_commenters.json" 2>/dev/null || echo '[]' > "issue_${issue_num}_commenters.json"
          done
          # Merge commenters into issues.json. The heredoc body and its
          # terminator must stay at the run-block base indent — an indented
          # PYTHON_EOF would never terminate a plain << heredoc.
          python3 << 'PYTHON_EOF'
          import json

          with open("issues.json") as f:
              issues = json.load(f)

          for issue in issues:
              commenter_file = f"issue_{issue['number']}_commenters.json"
              try:
                  with open(commenter_file) as f:
                      issue["commenters"] = json.load(f)
              except FileNotFoundError:
                  issue["commenters"] = []

          with open("issues_with_commenters.json", "w") as f:
              json.dump(issues, f, indent=2)
          PYTHON_EOF
          mv issues_with_commenters.json issues.json
          rm -f issue_*_commenters.json
          echo "Collected $(jq length issues.json) issues"

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: stats-${{ github.event.repository.name }}
          path: |
            commits.json
            languages.json
            prs.json
            issues.json
          retention-days: 7