#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import with_statement

import sys
import os
import re
import json
import time
import urllib2
import argparse
import urllib
import logging
from datetime import datetime
from pprint import pprint

import smtplib
from email.mime.text import MIMEText

from pygithub3 import Github

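# Status report generator for the open pull requests in the GITHUB_USER/GITHUB_REPO
# repository. PRs are bucketed by their labels and review activity, formatted into a
# plain-text report, and either printed to stdout or emailed through a Gmail account.
#
# Example invocation (script name, addresses, and credentials are placeholders):
#   python pr_report.py <github-api-token> --send [email protected] \
#          --gmail-user [email protected] --gmail-pass <app-password>
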
## ==============================================
## LOGGING
## ==============================================

LOG = logging.getLogger(__name__)
LOG_handler = logging.StreamHandler()
LOG_formatter = logging.Formatter(fmt='%(asctime)s [%(funcName)s:%(lineno)03d] %(levelname)-5s: %(message)s',
                                  datefmt='%m-%d-%Y %H:%M:%S')
LOG_handler.setFormatter(LOG_formatter)
LOG.addHandler(LOG_handler)
LOG.setLevel(logging.INFO)

## ==============================================
## CONFIGURATION
## ==============================================

GITHUB_USER = 'cmu-db'
GITHUB_REPO = 'peloton'

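# Per-PR cache of the raw review-comment JSON ('%d' is filled with the PR number)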
CACHE_FILEPATH = "/tmp/%d.github-cache"


# ==============================================
# pr_format
# ==============================================
def pr_format(pr):
    assert pr is not None
    ret = "%5s - %s [%s]\n" % ('#' + str(pr.number), pr.title, pr.user["login"])
    ret += " %s" % (pr.html_url)
    return ret
## DEF

# ==============================================
# main
# ==============================================
if __name__ == '__main__':
    aparser = argparse.ArgumentParser()
    aparser.add_argument('token', type=str, help='Github API Token')
    aparser.add_argument('--send', type=str, help='Send status report to given email')
    aparser.add_argument('--gmail-user', type=str, help='Gmail username')
    aparser.add_argument('--gmail-pass', type=str, help='Gmail password')
    aparser.add_argument("--cache", action='store_true', help="Enable caching of raw Github requests (for development only)")
    aparser.add_argument("--debug", action='store_true')
    args = vars(aparser.parse_args())

    ## ----------------------------------------------

    if args['debug']:
        LOG.setLevel(logging.DEBUG)

    ## ----------------------------------------------

    gh = Github(token=args['token'])
    # Fetch the repo up front so that a bad token or repo name fails fast
    gh.repos.get(user=GITHUB_USER, repo=GITHUB_REPO)

    ## --------------------------------
    ## PR MONITOR
    ## --------------------------------
    # List of pull requests:
    pull_requests = gh.pull_requests.list(state='open', user=GITHUB_USER, repo=GITHUB_REPO).all()

    open_pulls = { }
    for pr in pull_requests:
        #if pr.number != 1109: continue

        #print pr
        #pprint(pr.__dict__)
        #print "="*100

        # Get labels for this PR
        issue = gh.issues.get(pr.number, user=GITHUB_USER, repo=GITHUB_REPO)
        labels = [ i.name for i in issue.labels ]

        # Get events for this PR
        #events = gh.issues.events.list_by_issue(pr.number, user='cmu-db', repo=GITHUB_REPO).all()
        #for e in events:
            #pprint(e.__dict__)
            #print "-"*20
        #sys.exit(0)

        # Get the review comments for this PR, either from the local cache
        # (when --cache is given) or directly from Github
        cache_reviews = CACHE_FILEPATH % pr.number
        data = None
        if 'cache' in args and args['cache'] and os.path.exists(cache_reviews):
            LOG.debug("CACHED REVIEW COMMENTS '%s'" % cache_reviews)
            with open(cache_reviews, "r") as fd:
                data = fd.read()
        else:
            LOG.debug("Retrieving data from Github: '%s'" % pr.review_comments_url)
            data = urllib.urlopen(pr.review_comments_url).read()
            # Only write the cache file when caching is enabled
            if args['cache']:
                with open(cache_reviews, "w") as fd:
                    fd.write(data)
        # IF
        reviews = json.loads(data)

        open_pulls[pr.number] = (pr, labels, reviews)
    ## FOR

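    # Status buckets for the report:
    #   NeedReviewers - 'ready_for_review' but nobody has been asked to review yet
    #   ReviewMissing - reviewers are assigned, but none of them have commented
    #   FollowUp      - review comments received; waiting on the author to follow up
    #   ReadyToMerge  - labeled 'accepted' and has at least one review
    #   NoLabels      - open PRs with no labels at all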
    status = {
        "NeedReviewers": [ ],
        "ReviewMissing": [ ],
        "FollowUp": [ ],
        "ReadyToMerge": [ ],
        "NoLabels": [ ],
    }

    all_received_reviews = { }
    all_reviewers = { }
    for pr, labels, reviews in open_pulls.values():
        LOG.debug("Pull Request #%d - LABELS: %s", pr.number, labels)

        # Step 1: Get the list of 'ready_for_review' PRs that do not have
        # an assigned reviewer
        if 'ready_for_review' in labels and \
           len(pr.requested_reviewers) == 0:

            status['NeedReviewers'].append(pr.number)
            continue
        # IF

        # Step 2: Get the list of PRs that are 'ready_for_review' and have a
        # reviewer, but that reviewer hasn't put anything in yet, or that are
        # 'accepted' but not merged
        if 'ready_for_review' in labels or \
           'accepted' in labels:

            reviewers = set()
            for rr in pr.requested_reviewers:
                reviewers.add(rr["login"])
            ## FOR

            received_reviews = set()
            for r in reviews:
                received_reviews.add(r["user"]["login"])
            ## FOR
            # Don't count the author commenting on their own PR as a review
            if pr.user["login"] in received_reviews:
                received_reviews.remove(pr.user["login"])

            if len(received_reviews) > 0:
                if 'accepted' in labels:
                    status['ReadyToMerge'].append(pr.number)
                else:
                    status['FollowUp'].append(pr.number)
            else:
                status['ReviewMissing'].append(pr.number)

            all_reviewers[pr.number] = reviewers
            all_received_reviews[pr.number] = received_reviews

            LOG.debug("REVIEWERS: %s", ",".join(reviewers))
            LOG.debug("RECEIVED REVIEWS: %s", ",".join(received_reviews))
        # IF

        # Step 3: Mark any PRs without labels
        if len(labels) == 0:
            status["NoLabels"].append(pr.number)
        # IF
    ## FOR

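    # Assemble the plain-text report, one section per status bucket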
    content = ""
    linebreak = "-"*60 + "\n\n"

    ## NO LABELS
    content += "*NO LABELS*\n\n"
    if len(status['NoLabels']) == 0:
        content += "**NONE**\n\n"
    else:
        for pr_num in sorted(status['NoLabels']):
            content += pr_format(open_pulls[pr_num][0]) + "\n\n"
    content += linebreak

    ## READY TO MERGE
    content += "*READY TO MERGE*\n\n"
    if len(status['ReadyToMerge']) == 0:
        content += "**NONE**\n\n"
    else:
        for pr_num in sorted(status['ReadyToMerge']):
            content += pr_format(open_pulls[pr_num][0]) + "\n\n"
    content += linebreak

    ## READY FOR REVIEW, NO ASSIGNMENT
    content += "*READY FOR REVIEW WITHOUT ASSIGNMENT*\n\n"
    if len(status['NeedReviewers']) == 0:
        content += "**NONE**\n\n"
    else:
        for pr_num in sorted(status['NeedReviewers']):
            content += pr_format(open_pulls[pr_num][0]) + "\n\n"
    content += linebreak

    ## MISSING REVIEWS
    content += "*WAITING FOR REVIEW*\n\n"
    if len(status['ReviewMissing']) == 0:
        content += "**NONE**\n\n"
    else:
        for pr_num in sorted(status['ReviewMissing']):
            content += pr_format(open_pulls[pr_num][0]) + "\n\n"
            # Show who is assigned to review but has not submitted anything yet
            content += " Assigned Reviewers: %s\n" % (", ".join(sorted(all_reviewers[pr_num])))
    #content += linebreak

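    # Email the report through Gmail over SSL when --send is given;
    # otherwise just print it to stdout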
    if "send" in args and args["send"]:
        msg = MIMEText(content)
        msg['Subject'] = "%s PR Status Report (%s)" % (GITHUB_REPO.title(), datetime.now().strftime("%Y-%m-%d"))
        # Send from the Gmail account that we authenticate with below
        msg['From'] = args["gmail_user"]
        msg['To'] = args["send"]
        msg['Reply-To'] = "[email protected]"

        server_ssl = smtplib.SMTP_SSL("smtp.gmail.com", 465)
        server_ssl.ehlo() # optional, called by login()
        server_ssl.login(args["gmail_user"], args["gmail_pass"])
        # ssl server doesn't support or need tls, so don't call server_ssl.starttls()
        server_ssl.sendmail(msg['From'], [msg['To']], msg.as_string())
        #server_ssl.quit()
        server_ssl.close()

        LOG.info("Status email sent to '%s'" % args["send"])
    else:
        print content
    ## IF

## MAIN