47
47
"Accept" : "application/vnd.github.v3+json" ,
48
48
}
49
49
50
+
50
51
class DateTimeCache(diskcache.Cache):
    """diskcache.Cache subclass that transparently round-trips datetime values.

    datetime objects are stored as ``{"__datetime__": <ISO-8601 string>}``
    wrapper dicts so they survive the cache's serialization, and are rebuilt
    into real datetimes when read back.
    """

    def __setitem__(self, key, value):
        """Store *value* under *key*, wrapping datetimes as tagged ISO dicts."""
        if isinstance(value, datetime):
            value = {"__datetime__": value.isoformat()}
        super().__setitem__(key, value)

    def __getitem__(self, key):
        """Return the value for *key*, unwrapping tagged datetime dicts."""
        value = super().__getitem__(key)
        if isinstance(value, dict) and "__datetime__" in value:
            return datetime.fromisoformat(value["__datetime__"])
        return value

    def get(self, key, default=None, retry=False):
        """Like ``__getitem__`` but return *default* on a cache miss.

        Bug fix: ``diskcache.Cache.get`` bypasses ``__getitem__``, so datetime
        values written via ``__setitem__`` were previously returned from here
        as raw ``{"__datetime__": ...}`` wrapper dicts (callers such as the
        user-activity lookup treat this result as an ``Optional[datetime]``).
        Apply the same unwrapping as ``__getitem__``.
        """
        try:
            value = super().get(key, default=default, retry=retry)
        except KeyError:
            return default
        if isinstance(value, dict) and "__datetime__" in value:
            return datetime.fromisoformat(value["__datetime__"])
        return value
# Configure the on-disk cache in the current working directory; the
# DateTimeCache wrapper makes datetime values survive serialization.
CACHE_DIR = "github_cache"
cache = DateTimeCache(CACHE_DIR)
async def get_org_members (session : aiohttp .ClientSession , org : str ) -> List [Dict ]:
78
81
"""Fetch all members of a GitHub organization with caching.
79
82
@@ -99,7 +102,7 @@ async def get_org_members(session: aiohttp.ClientSession, org: str) -> List[Dict
99
102
Pagination is handled automatically (100 items per page).
100
103
"""
101
104
cache_key = f"org_members_{ org } "
102
-
105
+
103
106
# Try to get from cache with retry
104
107
cached_data = cache .get (cache_key , retry = True )
105
108
if cached_data is not None :
@@ -108,42 +111,47 @@ async def get_org_members(session: aiohttp.ClientSession, org: str) -> List[Dict
108
111
109
112
print (f"[yellow]Cache miss for { org } members - fetching from API[/yellow]" )
110
113
members = []
111
-
114
+
112
115
try :
113
116
for page in count (1 ):
114
117
url = f"https://api.github.com/orgs/{ org } /members?page={ page } &per_page=100"
115
118
async with session .get (url , headers = headers ) as response :
116
119
if response .status != 200 :
117
- print (f"[red]Error fetching members for { org } : { response .status } [/red]" )
120
+ print (
121
+ f"[red]Error fetching members for { org } : { response .status } [/red]"
122
+ )
118
123
break
119
-
124
+
120
125
page_members = await response .json ()
121
126
if not page_members :
122
127
break
123
-
128
+
124
129
members .extend (page_members )
125
-
130
+
126
131
# Cache the results
127
132
cache [cache_key ] = members # Using __setitem__ instead of set()
128
133
print (f"[green]Cached { len (members )} members for { org } [/green]" )
129
134
return members
130
-
135
+
131
136
except Exception as e :
132
137
print (f"[red]Error fetching members for { org } : { str (e )} [/red]" )
133
138
return []
134
139
135
- async def get_user_activity (session : aiohttp .ClientSession , username : str ) -> Optional [datetime ]:
140
+
141
+ async def get_user_activity (
142
+ session : aiohttp .ClientSession , username : str
143
+ ) -> Optional [datetime ]:
136
144
"""Fetch the last public activity date for a GitHub user."""
137
145
cache_key = f"user_activity_{ username } "
138
-
146
+
139
147
# Try to get from cache
140
148
cached_data = cache .get (cache_key )
141
149
if cached_data is not None :
142
150
print (f"[cyan]Cache hit for { username } activity[/cyan]" )
143
151
return cached_data
144
152
145
153
print (f"[yellow]Cache miss for { username } activity - fetching from API[/yellow]" )
146
-
154
+
147
155
try :
148
156
print (f"Getting activity for { username } " )
149
157
url = f"https://api.github.com/users/{ username } /events/public"
@@ -152,32 +160,42 @@ async def get_user_activity(session: aiohttp.ClientSession, username: str) -> Op
152
160
print (f"Got activity for { username } " )
153
161
events = await response .json ()
154
162
if events :
155
- last_activity = datetime .fromisoformat (events [0 ]["created_at" ].replace ('Z' , '+00:00' ))
163
+ last_activity = datetime .fromisoformat (
164
+ events [0 ]["created_at" ].replace ("Z" , "+00:00" )
165
+ )
156
166
# Cache the results
157
- cache [cache_key ] = last_activity # Using __setitem__ instead of set()
167
+ cache [cache_key ] = (
168
+ last_activity # Using __setitem__ instead of set()
169
+ )
158
170
print (f"[green]Cached activity for { username } [/green]" )
159
171
return last_activity
160
172
else :
161
173
print (f"[yellow]No activity found for { username } [/yellow]" )
162
174
cache [cache_key ] = None # Using __setitem__ instead of set()
163
175
else :
164
- print (f"[red]Error fetching activity for { username } : { response .status } [/red]" )
176
+ print (
177
+ f"[red]Error fetching activity for { username } : { response .status } [/red]"
178
+ )
165
179
except Exception as e :
166
180
print (f"[red]Error fetching activity for { username } : { str (e )} [/red]" )
167
-
181
+
168
182
return None
169
183
184
+
170
185
def get_cache_size() -> str:
    """Return the on-disk cache size as a human-readable string.

    Falls back to ``"unknown size"`` when the cache directory does not exist
    or any filesystem error occurs while walking it (best-effort by design).
    """
    try:
        root = pathlib.Path(CACHE_DIR)
        if root.exists():
            # Tally regular files only; directories contribute no size here.
            total_bytes = 0
            for entry in root.rglob("*"):
                if entry.is_file():
                    total_bytes += entry.stat().st_size
            return f"{total_bytes / 1024 / 1024:.1f} MB"
    except Exception:
        # Deliberate best-effort: never let a stat/walk failure reach callers.
        pass
    return "unknown size"
180
197
198
+
181
199
def clear_cache () -> None :
182
200
"""Clear the disk cache."""
183
201
try :
@@ -186,6 +204,7 @@ def clear_cache() -> None:
186
204
except Exception as e :
187
205
print (f"[red]Error clearing cache: { str (e )} [/red]" )
188
206
207
+
189
208
async def main ():
190
209
"""Main execution function."""
191
210
# Show cache status
@@ -194,16 +213,22 @@ async def main():
194
213
195
214
async with aiohttp .ClientSession () as session :
196
215
# Check rate limit
197
- async with session .get ("https://api.github.com/rate_limit" , headers = headers ) as response :
216
+ async with session .get (
217
+ "https://api.github.com/rate_limit" , headers = headers
218
+ ) as response :
198
219
if response .status == 200 :
199
220
rate_data = await response .json ()
200
221
remaining = rate_data ["resources" ]["core" ]["remaining" ]
201
- reset_time = datetime .fromtimestamp (rate_data ["resources" ]["core" ]["reset" ])
222
+ reset_time = datetime .fromtimestamp (
223
+ rate_data ["resources" ]["core" ]["reset" ]
224
+ )
202
225
reset_in = humanize .naturaltime (reset_time )
203
226
print (f"Rate limit remaining: { remaining } " )
204
227
print (f"Rate limit resets { reset_in } " )
205
228
if remaining < 100 :
206
- print (f"[yellow]Warning: Low rate limit ({ remaining } remaining)[/yellow]" )
229
+ print (
230
+ f"[yellow]Warning: Low rate limit ({ remaining } remaining)[/yellow]"
231
+ )
207
232
if remaining < 10 :
208
233
print ("[red]Aborting due to very low rate limit[/red]" )
209
234
return
@@ -230,18 +255,31 @@ async def main():
230
255
for (username , _ ), last_activity in zip (tasks , results ):
231
256
user_activities .append ((username , last_activity , all_members [username ]))
232
257
233
- for username , last_activity , user_orgs in sorted (user_activities , key = lambda x : x [1 ] if x [1 ] is not None else datetime .fromtimestamp (0 ), reverse = True ):
234
- last_activity_ago = humanize .naturaltime (datetime .now (last_activity .tzinfo ) - last_activity ) if last_activity else "[red]never[/red]"
258
+ for username , last_activity , user_orgs in sorted (
259
+ user_activities ,
260
+ key = lambda x : x [1 ] if x [1 ] is not None else datetime .fromtimestamp (0 ),
261
+ reverse = True ,
262
+ ):
263
+ last_activity_ago = (
264
+ humanize .naturaltime (datetime .now (last_activity .tzinfo ) - last_activity )
265
+ if last_activity
266
+ else "[red]never[/red]"
267
+ )
235
268
orgs_str = ", " .join (user_orgs )
236
- print (f"{ username :<20} : Last activity { last_activity_ago } in orgs: { orgs_str } " )
269
+ print (
270
+ f"{ username :<20} : Last activity { last_activity_ago } in orgs: { orgs_str } "
271
+ )
272
+
237
273
238
274
if __name__ == "__main__":
    # Command-line entry point.
    parser = argparse.ArgumentParser(
        description="GitHub Organization Activity Tracker"
    )
    parser.add_argument(
        "--clear-cache",
        action="store_true",
        help="Clear the cache before running",
    )
    parser.add_argument(
        "--debug",
        action="store_true",
        help="Show debug information",
    )
    args = parser.parse_args()

    # Optionally wipe the on-disk cache before doing any work.
    if args.clear_cache:
        clear_cache()

    asyncio.run(main())
0 commit comments