# GitHub Actions workflow: "Log traffic"
# (Copied from the Actions run view; web-page chrome removed so the file parses as YAML.)
# Nightly job: record GitHub repository traffic (views & clones) into CSV
# files under data/, deduped by date, and commit the result back to the repo.
name: Log traffic
on:
  schedule:
    - cron: "17 2 * * *"  # daily at 02:17 UTC
  workflow_dispatch:  # lets you run it manually
permissions:
  contents: write  # to commit CSV
jobs:
  log:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Fetch traffic (views & clones) and append to CSV
        env:
          # Derived from the run context instead of hard-coded placeholders,
          # so the workflow works in any repository it is added to.
          OWNER: ${{ github.repository_owner }}
          REPO: ${{ github.event.repository.name }}
          # NOTE(review): the traffic API requires a token with push access;
          # presumably the default GITHUB_TOKEN was insufficient, hence a PAT
          # stored as TRAFFIC_TOKEN — confirm the secret is configured.
          GH_TOKEN: ${{ secrets.TRAFFIC_TOKEN }}
        run: |
          set -euo pipefail
          mkdir -p data
          # -fsS: fail with a non-zero exit on HTTP errors (and print the
          # error) instead of silently feeding an error JSON body to jq.
          VIEWS_JSON=$(curl -fsS -H "Authorization: Bearer $GH_TOKEN" \
            -H "Accept: application/vnd.github+json" \
            "https://api.github.com/repos/$OWNER/$REPO/traffic/views")
          CLONES_JSON=$(curl -fsS -H "Authorization: Bearer $GH_TOKEN" \
            -H "Accept: application/vnd.github+json" \
            "https://api.github.com/repos/$OWNER/$REPO/traffic/clones")
          # Ensure CSV headers exist
          test -f data/views.csv || echo "date,uniques,count" > data/views.csv
          test -f data/clones.csv || echo "date,uniques,count" > data/clones.csv
          # Append new rows (dedupe by date afterwards)
          echo "$VIEWS_JSON" | jq -r '.views[] | "\(.timestamp[0:10]),\(.uniques),\(.count)"' >> data/views.csv
          echo "$CLONES_JSON" | jq -r '.clones[] | "\(.timestamp[0:10]),\(.uniques),\(.count)"' >> data/clones.csv
          # Dedupe keeping the last occurrence per date, sorted by date.
          # The header is kept OUT of the sort: the previous pipeline piped
          # the header through `sort` with the data rows, and since
          # "date,..." sorts after ISO dates ("d" > "2") the header ended up
          # at the bottom of the file, corrupting the CSV on the next run.
          for f in data/views.csv data/clones.csv; do
            {
              head -n 1 "$f"
              tail -n +2 "$f" \
                | awk -F, '{last[$1] = $0} END {for (d in last) print last[d]}' \
                | sort
            } > "$f.tmp"
            mv "$f.tmp" "$f"
          done
      - name: Commit CSV
        run: |
          set -euo pipefail
          # Commit only when the dedupe/append actually changed something.
          if [[ -n "$(git status --porcelain)" ]]; then
            git config user.name "github-actions[bot]"
            git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
            git add data/*.csv
            git commit -m "Update traffic logs"
            git push
          fi