blackroad-os-kpis/collectors/github-deep.sh
Alexa Amundson 42fb629ba5 v2: 9 collectors, 60+ KPIs — cloudflare, services, local mac, github-deep, autonomy bars
RoadChain-SHA2048: f59eb7b3bb74e603
RoadChain-Identity: alexa@sovereign
RoadChain-Full: f59eb7b3bb74e60325e3dc2bde2dce2d3f77e4afdadb4b559bf44b95318aac5e44854cc056bec1414243177e469feccdf8a5bf21697916f004706ac784fc70a9ce221703ff29c91581884e5903b5d4a6127a3b570eda54801cf7641a17b13490bb8a3d3be04ee01a96030087800f62f02f47e6ca3d76a3e832c8cdfbeefa3ffbd57acc133d9a7d684161e565dd53636a48410cd38d322620c3fac516a79e5831edf3dab2e81c484f583628c617f85f066351f403163cea6e939484ab33bdaaaa27b23695999aa0e68ae9bff10bf9dfbabcd2785b286600e940359f8e2122c708ed62e7a358accafd224da40151104d77017d4c91fa5b35cce8ca7a728d1b37e0
2026-03-12 23:45:44 -05:00


#!/bin/bash
# Deep GitHub metrics: stars, forks, traffic, languages, profile stats
source "$(dirname "$0")/../lib/common.sh"
log "Collecting deep GitHub metrics..."
OUT=$(snapshot_file github-deep)
# Profile stats
profile=$(gh api "users/$GITHUB_USER" 2>/dev/null || echo '{}')
followers=$(echo "$profile" | python3 -c "import json,sys; print(json.load(sys.stdin).get('followers',0))" 2>/dev/null || echo 0)
following=$(echo "$profile" | python3 -c "import json,sys; print(json.load(sys.stdin).get('following',0))" 2>/dev/null || echo 0)
public_repos=$(echo "$profile" | python3 -c "import json,sys; print(json.load(sys.stdin).get('public_repos',0))" 2>/dev/null || echo 0)
public_gists=$(echo "$profile" | python3 -c "import json,sys; print(json.load(sys.stdin).get('public_gists',0))" 2>/dev/null || echo 0)
ok "Profile: $public_repos repos, $followers followers"
# Aggregate stars, forks, sizes across all repos
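# Note: this parse assumes gh --paginate delivers each page's JSON array on
# its own line; pages concatenated onto one line would be silently skipped by
# the except below. Newer gh releases offer --slurp to merge pages safely.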
repo_stats=$(gh api "users/$GITHUB_USER/repos?per_page=100&type=owner" --paginate 2>/dev/null | python3 -c "
import json, sys

repos = []
for line in sys.stdin:
    try:
        repos.extend(json.loads(line))
    except ValueError:
        pass

total_stars = sum(r.get('stargazers_count', 0) for r in repos)
total_forks = sum(r.get('forks_count', 0) for r in repos)
total_watchers = sum(r.get('watchers_count', 0) for r in repos)
total_size_kb = sum(r.get('size', 0) for r in repos)  # API reports size in KB
total_open_issues = sum(r.get('open_issues_count', 0) for r in repos)
archived = sum(1 for r in repos if r.get('archived'))
active = len(repos) - archived

# Primary language counts
langs = {}
for r in repos:
    l = r.get('language')
    if l:
        langs[l] = langs.get(l, 0) + 1

# Most recently updated
recent = sorted(repos, key=lambda r: r.get('updated_at', ''), reverse=True)[:10]
recent_names = [r['full_name'] for r in recent]

# Largest repos
largest = sorted(repos, key=lambda r: r.get('size', 0), reverse=True)[:10]
largest_info = [{r['full_name']: round(r['size'] / 1024, 1)} for r in largest]

print(json.dumps({
    'total_stars': total_stars,
    'total_forks': total_forks,
    'total_watchers': total_watchers,
    'total_size_mb': round(total_size_kb / 1024, 1),
    'total_open_issues': total_open_issues,
    'archived': archived,
    'active': active,
    'languages': langs,
    'top_10_recent': recent_names,
    'top_10_largest_mb': largest_info
}))
" 2>/dev/null || echo '{}')
# Org stats (GITHUB_ORGS is relied on as a space-separated list)
org_stats='{'
first=true
for org in $GITHUB_ORGS; do
    # --paginate applies the --jq filter once per page, so sum the per-page counts
    org_repos=$(gh api "orgs/$org/repos?per_page=100" --paginate --jq 'length' 2>/dev/null | awk '{s += $1} END {print s + 0}')
    # members: first page only, capped at 100
    org_members=$(gh api "orgs/$org/members?per_page=100" --jq 'length' 2>/dev/null || echo 0)
    if [ "$first" = true ]; then
        org_stats="$org_stats\"$org\": {\"repos\": $org_repos, \"members\": $org_members}"
        first=false
    else
        org_stats="$org_stats, \"$org\": {\"repos\": $org_repos, \"members\": $org_members}"
    fi
done
org_stats="$org_stats}"
ok "Orgs: $org_stats"
python3 -c "
import json

repo_stats = json.loads('''$repo_stats''')
org_stats = json.loads('''$org_stats''')

output = {
    'source': 'github-deep',
    'collected_at': '$TIMESTAMP',
    'date': '$TODAY',
    'profile': {
        'followers': $followers,
        'following': $following,
        'public_repos': $public_repos,
        'public_gists': $public_gists
    },
    'repos': repo_stats,
    'orgs': org_stats
}

with open('$OUT', 'w') as f:
    json.dump(output, f, indent=2)
" 2>/dev/null
ok "Deep GitHub metrics collected"