#!/usr/bin/env python3
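"""Fetch clone-traffic statistics for a GitHub repository and write them to JSON files.

Reads GITHUB_REPOSITORY and TRAFFIC_TOKEN from the environment, queries the GitHub REST
API traffic/clones endpoint, merges the returned daily data points into a persistent
history file, and emits two Shields endpoint badge JSON files under .github/traffic/.
"""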
import json
import os
import pathlib
import urllib.error
import urllib.request
from datetime import datetime, timezone

API_VERSION = "2022-11-28"

def gh_request(url: str, token: str) -> tuple[int, str]:
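    """Issue an authenticated GET against the GitHub REST API and return (status, body)."""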
    req = urllib.request.Request(url)
    req.add_header("Accept", "application/vnd.github+json")
    req.add_header("Authorization", f"Bearer {token}")
    req.add_header("X-GitHub-Api-Version", API_VERSION)
    try:
        with urllib.request.urlopen(req) as resp:
            return resp.status, resp.read().decode("utf-8")
    except urllib.error.HTTPError as e:
        body = e.read().decode("utf-8", errors="replace")
        return e.code, body

def write_json(path: pathlib.Path, obj) -> None:
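    """Write obj as compact, sorted JSON, creating parent directories as needed."""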
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(obj, ensure_ascii=False, separators=(",", ":"), sort_keys=True) + "\n", encoding="utf-8")

def main():
    repo = os.environ.get("GITHUB_REPOSITORY", "")
    token = os.environ.get("TRAFFIC_TOKEN", "")
    if not repo or not token:
        raise SystemExit("Missing GITHUB_REPOSITORY or TRAFFIC_TOKEN")

    owner, name = repo.split("/", 1)
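    # The traffic/clones endpoint reports at most the last 14 days; per=day requests daily granularity.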
    url = f"https://api.github.com/repos/{owner}/{name}/traffic/clones?per=day"

    status, text = gh_request(url, token)

    # 202 = data still being generated (happens occasionally). In that case, leave the files untouched.
    if status == 202:
        print("GitHub returned 202 Accepted (traffic data being generated). Skipping update.")
        return

    if status != 200:
        print(f"GitHub API error {status}: {text}")
        raise SystemExit(1)

    data = json.loads(text)
    count_14d = int(data.get("count", 0))
    uniques_14d = int(data.get("uniques", 0))
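    # "count" is total clones and "uniques" is unique cloners over the 14-day window.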
    points = data.get("clones", [])  # list of timestamp/count/uniques points (last 14 days)

    out_dir = pathlib.Path(".github/traffic")
    history_path = out_dir / "clones-history.json"

    # History: dict mapping timestamp -> {"count": ..., "uniques": ...}
    if history_path.exists():
        history = json.loads(history_path.read_text(encoding="utf-8"))
    else:
        history = {}

    for p in points:
        ts = p.get("timestamp")
        if ts:
            history[ts] = {"count": int(p.get("count", 0)), "uniques": int(p.get("uniques", 0))}

    total_tracked = sum(v.get("count", 0) for v in history.values())
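    # Summing the per-day counts stored in the history approximates total clones since tracking began.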

    # Badge JSON (Shields endpoint schema)
    badge_14d = {
        "schemaVersion": 1,
        "label": "clones (14d)",
        "message": f"{count_14d} ({uniques_14d} uniques)",
    }
    badge_total = {
        "schemaVersion": 1,
        "label": "clones (tracked)",
        "message": str(total_tracked),
    }

    write_json(history_path, dict(sorted(history.items())))
    write_json(out_dir / "clones-14d.json", badge_14d)
    write_json(out_dir / "clones-total.json", badge_total)

    # Useful log line
    print(f"Updated: 14d={count_14d} uniques={uniques_14d} total_tracked={total_tracked} points={len(points)} at {datetime.now(timezone.utc).isoformat()}")

if __name__ == "__main__":
    main()
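
# Example (illustrative, not part of this script): the generated badge JSON can be rendered
# with a Shields endpoint badge. OWNER, REPO, and the branch name below are placeholders
# and depend on where this repository publishes the files:
#   https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/OWNER/REPO/main/.github/traffic/clones-14d.json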