Skip to content

Commit ab29f73

Browse files
github-actions[bot] and ap-1
authored and committed
ci: add ScottyLabs Wrapped analysis workflow
1 parent 249eceb commit ab29f73

File tree

1 file changed

+184
-0
lines changed

1 file changed

+184
-0
lines changed
Lines changed: 184 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,184 @@
name: ScottyLabs Wrapped Analysis

on:
  push:
    branches:
      - 'wrapped-analysis-*'

jobs:
  analyze:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Get full history for git log analysis

      - name: Install cargo-binstall
        uses: cargo-bins/cargo-binstall@main

      - name: Install tokei
        run: cargo binstall --no-confirm tokei

      - name: Collect commit statistics
        env:
          # Pass the ref through the environment instead of interpolating
          # ${{ github.ref_name }} into the script body, so a crafted branch
          # name cannot inject shell commands.
          BRANCH_NAME: ${{ github.ref_name }}
        run: |
          # Extract year from branch name (wrapped-analysis-2025 -> 2025)
          year="${BRANCH_NAME#wrapped-analysis-}"

          # Write the parser to a real file first. The previous version piped
          # `git log` into `python3 <<'EOF'`, but a heredoc redirection
          # replaces the pipe as stdin, so the parser never saw the git
          # output and commits.json was always [].
          cat > "$RUNNER_TEMP/parse_commits.py" << 'PYTHON_EOF'
          import sys
          import json

          commits = []
          current_commit = None

          try:
              for line in sys.stdin:
                  line = line.rstrip()
                  if not line:
                      continue

                  # Commit headers carry the 0x1F unit separator between fields.
                  if '\x1f' in line:
                      # Save the previous commit if it recorded any file stats.
                      if current_commit and current_commit.get("files_changed"):
                          commits.append(current_commit)

                      # Parse new commit header: sha, author, email, date, subject.
                      parts = line.split('\x1f')
                      if len(parts) == 5:
                          current_commit = {
                              "sha": parts[0],
                              "author_name": parts[1],
                              "author_email": parts[2],
                              "timestamp": parts[3],
                              "message": parts[4],
                              "files_changed": [],
                              "additions": 0,
                              "deletions": 0
                          }
                  else:
                      # numstat line: additions<TAB>deletions<TAB>filename.
                      # "-" marks binary files; count those as 0.
                      if current_commit:
                          parts = line.split("\t")
                          if len(parts) == 3:
                              additions, deletions, filename = parts
                              try:
                                  adds = int(additions) if additions != "-" else 0
                                  dels = int(deletions) if deletions != "-" else 0
                              except ValueError:
                                  adds, dels = 0, 0

                              current_commit["additions"] += adds
                              current_commit["deletions"] += dels
                              current_commit["files_changed"].append({
                                  "filename": filename,
                                  "additions": adds,
                                  "deletions": dels
                              })

              # Flush the final commit.
              if current_commit and current_commit.get("files_changed"):
                  commits.append(current_commit)
          except Exception:
              # Best-effort: on any parse error, emit what was collected so far.
              pass

          print(json.dumps(commits, indent=2))
          PYTHON_EOF

          # Use explicit timestamps: git resolves a date-only --until relative
          # to the current time of day, which silently drops late commits on
          # Dec 31.
          git log --all \
            --since="${year}-01-01T00:00:00" \
            --until="${year}-12-31T23:59:59" \
            --pretty=format:'%H%x1F%an%x1F%ae%x1F%aI%x1F%s' \
            --numstat 2>/dev/null \
            | python3 "$RUNNER_TEMP/parse_commits.py" > commits.json

          echo "Collected $(jq length commits.json) commits"

      - name: Collect language statistics
        run: |
          tokei --output json > languages.json
          echo "Analyzed $(jq 'keys | length' languages.json) languages"

      - name: Collect PR statistics
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          BRANCH_NAME: ${{ github.ref_name }}
        run: |
          # Extract year from branch name
          year="${BRANCH_NAME#wrapped-analysis-}"

          # Fetch all PRs created in the specified year.
          # NOTE(review): --limit 1000 caps the result set; very active
          # repositories may need pagination.
          gh pr list --state all --limit 1000 \
            --json number,author,state,createdAt,mergedAt,additions,deletions,reviews \
            | jq --arg year "$year" '
              map(select(.createdAt | startswith($year)))
            ' > prs.json

          echo "Collected $(jq length prs.json) PRs"

      - name: Collect issue statistics
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          BRANCH_NAME: ${{ github.ref_name }}
          REPO: ${{ github.repository }}
        run: |
          # Extract year from branch name
          year="${BRANCH_NAME#wrapped-analysis-}"

          # Fetch all issues created in the specified year.
          # Repositories with issues disabled fall back to an empty array.
          gh issue list --state all --limit 1000 \
            --json number,author,state,createdAt,closedAt,comments 2>/dev/null \
            | jq --arg year "$year" '
              map(select(.createdAt | startswith($year)))
            ' > issues.json || echo '[]' > issues.json

          # Fetch the unique commenter logins for each issue.
          jq -r '.[].number' issues.json | while read -r issue_num; do
            gh api "/repos/${REPO}/issues/${issue_num}/comments" \
              --jq '[.[].user.login] | unique' > "issue_${issue_num}_commenters.json" 2>/dev/null || echo '[]' > "issue_${issue_num}_commenters.json"
          done

          # Merge commenters into issues.json. A heredoc is fine here:
          # this script reads files, not stdin.
          python3 << 'PYTHON_EOF'
          import json

          # Read and validate issues.json; tolerate empty or invalid content.
          try:
              with open("issues.json") as f:
                  content = f.read().strip()
              issues = json.loads(content) if content else []
          except (json.JSONDecodeError, FileNotFoundError):
              issues = []

          for issue in issues:
              commenter_file = f"issue_{issue['number']}_commenters.json"
              try:
                  with open(commenter_file) as f:
                      issue["commenters"] = json.load(f)
              except (FileNotFoundError, json.JSONDecodeError):
                  issue["commenters"] = []

          with open("issues_with_commenters.json", "w") as f:
              json.dump(issues, f, indent=2)
          PYTHON_EOF

          mv issues_with_commenters.json issues.json
          rm -f issue_*_commenters.json

          echo "Collected $(jq length issues.json) issues"

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: stats-${{ github.event.repository.name }}
          path: |
            commits.json
            languages.json
            prs.json
            issues.json
          retention-days: 7

0 commit comments

Comments
 (0)