import sys  # used by the ImportError fallback below; imported here so this span is self-contained
from typing import Any

import requests
from rich.console import Console

# Global console instance for rich output; all user-facing messages in this
# script go through this single Console so styling stays consistent.
console = Console()


# Fail fast with an actionable install hint when the GCP client libraries are
# missing -- the snapshot upload and Firestore lookups below depend on them.
try:
    from google.cloud import firestore, storage  # type: ignore[attr-defined]
except ImportError:
    console.print(
        "[red]Error: google-cloud-storage or google-cloud-firestore not installed.[/red]"
    )
    console.print("Run: pip install google-cloud-storage google-cloud-firestore")
    sys.exit(1)
3037
@@ -39,11 +46,13 @@ def run_command(cmd: list[str]) -> Any:
3946 )
4047 return json .loads (result .stdout )
4148 except subprocess .CalledProcessError as e :
42- print (f"Error running command { ' ' .join (cmd )} : { e } " )
43- print (f"stderr: { e .stderr } " )
49+ console . print (f"[red]✗ Error running command { ' ' .join (cmd )} :[/red] { e } " )
50+ console . print (f"[red] stderr:[/red] { e .stderr } " )
4451 sys .exit (1 )
4552 except json .JSONDecodeError as e :
46- print (f"Error parsing JSON from command { ' ' .join (cmd )} : { e } " )
53+ console .print (
54+ f"[red]✗ Error parsing JSON from command { ' ' .join (cmd )} :[/red] { e } "
55+ )
4756 sys .exit (1 )
4857
4958
@@ -61,7 +70,9 @@ def get_coder_api_config() -> tuple[str, str]:
6170 # Get token from environment (try CODER_TOKEN or CODER_SESSION_TOKEN)
6271 session_token = os .getenv ("CODER_TOKEN" ) or os .getenv ("CODER_SESSION_TOKEN" )
6372 if not session_token :
64- print ("Error: CODER_TOKEN or CODER_SESSION_TOKEN environment variable not set" )
73+ console .print (
74+ "[red]✗ Error: CODER_TOKEN or CODER_SESSION_TOKEN environment variable not set[/red]"
75+ )
6576 sys .exit (1 )
6677
6778 return api_url , session_token
@@ -94,7 +105,9 @@ def fetch_workspace_builds(
94105 response .raise_for_status ()
95106 return response .json ()
96107 except requests .RequestException as e :
97- print (f"Warning: Failed to fetch builds for workspace { workspace_id } : { e } " )
108+ console .print (
109+ f"[yellow]⚠ Warning: Failed to fetch builds for workspace { workspace_id } :[/yellow] { e } "
110+ )
98111 return []
99112
100113
@@ -143,7 +156,9 @@ def calculate_build_usage_hours(build: dict[str, Any]) -> float:
143156
144157 return 0.0
145158 except Exception as e :
146- print (f"Warning: Error calculating build usage hours: { e } " )
159+ console .print (
160+ f"[yellow]⚠ Warning: Error calculating build usage hours:[/yellow] { e } "
161+ )
147162 return 0.0
148163
149164
@@ -191,6 +206,7 @@ def fetch_user_activity_insights(
191206 headers = {"Coder-Session-Token" : session_token }
192207 params = {"start_time" : start_time , "end_time" : end_time }
193208
209+ response = None
194210 try :
195211 response = requests .get (url , headers = headers , params = params , timeout = 30 )
196212 response .raise_for_status ()
@@ -207,10 +223,12 @@ def fetch_user_activity_insights(
207223
208224 return activity_map
209225 except requests .RequestException as e :
210- print (f"Warning: Failed to fetch user activity insights: { e } " )
226+ console .print (
227+ f"[yellow]⚠ Warning: Failed to fetch user activity insights:[/yellow] { e } "
228+ )
211229 try :
212230 error_details = response .json () if response else {}
213- print (f"Error details: { error_details } " )
231+ console . print (f"[yellow] Error details:[/yellow] { error_details } " )
214232 except Exception :
215233 pass
216234 return {}
@@ -233,7 +251,9 @@ def get_historical_participant_data(bucket_name: str) -> dict[str, dict[str, Any
233251 'last_name': str | None
234252 }
235253 """
236- print ("Fetching historical participant data from previous snapshot..." )
254+ console .print (
255+ "[cyan]Fetching historical participant data from previous snapshot...[/cyan]"
256+ )
237257
238258 try :
239259 storage_client = storage .Client ()
@@ -244,20 +264,20 @@ def get_historical_participant_data(bucket_name: str) -> dict[str, dict[str, Any
244264 blobs = list (bucket .list_blobs (prefix = "snapshots/" ))
245265
246266 if not blobs :
247- print (" No previous snapshots found" )
267+ console . print (" [yellow] No previous snapshots found[/yellow] " )
248268 return {}
249269
250270 # Sort by name (which includes timestamp) to get most recent
251271 snapshot_blobs = [b for b in blobs if b .name .endswith (".json" )]
252272 if not snapshot_blobs :
253- print (" No snapshot JSON files found" )
273+ console . print (" [yellow] No snapshot JSON files found[/yellow] " )
254274 return {}
255275
256276 # Get the most recent snapshot (last in sorted order)
257277 snapshot_blobs .sort (key = lambda b : b .name )
258278 latest_blob = snapshot_blobs [- 1 ]
259279
260- print (f" Using previous snapshot: { latest_blob .name } " )
280+ console . print (f" [dim] Using previous snapshot: { latest_blob .name } [/dim] " )
261281
262282 content = latest_blob .download_as_text ()
263283 snapshot = json .loads (content )
@@ -277,10 +297,14 @@ def get_historical_participant_data(bucket_name: str) -> dict[str, dict[str, Any
277297 "last_name" : last_name ,
278298 }
279299
280- print (f"✓ Loaded historical data for { len (historical_data )} participants" )
300+ console .print (
301+ f"[green]✓[/green] Loaded historical data for { len (historical_data )} participants"
302+ )
281303 return historical_data
282304 except Exception as e :
283- print (f" Warning: Could not load historical data: { e } " )
305+ console .print (
306+ f" [yellow]⚠ Warning: Could not load historical data:[/yellow] { e } "
307+ )
284308 return {}
285309
286310
@@ -296,7 +320,7 @@ def get_participant_mappings() -> dict[str, dict[str, Any]]:
296320 'last_name': str | None
297321 }
298322 """
299- print ("Fetching current participant data from Firestore..." )
323+ console . print ("[cyan] Fetching current participant data from Firestore...[/cyan] " )
300324
301325 project_id = "coderd"
302326 database_id = "onboarding"
@@ -316,7 +340,9 @@ def get_participant_mappings() -> dict[str, dict[str, Any]]:
316340 "last_name" : data .get ("last_name" ),
317341 }
318342
319- print (f"✓ Loaded { len (mappings )} current participant mappings" )
343+ console .print (
344+ f"[green]✓[/green] Loaded { len (mappings )} current participant mappings"
345+ )
320346 return mappings
321347
322348
@@ -340,7 +366,7 @@ def merge_participant_data(
340366 dict[str, dict[str, Any]]
341367 Merged participant data with historical preservation
342368 """
343- print ("Merging historical and current participant data..." )
369+ console . print ("[cyan] Merging historical and current participant data...[/cyan] " )
344370
345371 # Start with historical data (preserves deleted participants)
346372 merged = historical_data .copy ()
@@ -349,11 +375,11 @@ def merge_participant_data(
349375 for handle , data in current_data .items ():
350376 merged [handle ] = data
351377
352- print (f"✓ Merged data: { len (merged )} total participants" )
353- print (
354- f" - Historical only (deleted): { len (set (historical_data .keys ()) - set (current_data .keys ()))} "
378+ console . print (f"[green]✓[/green] Merged data: { len (merged )} total participants" )
379+ console . print (
380+ f" [dim] Historical only (deleted):[/dim] { len (set (historical_data .keys ()) - set (current_data .keys ()))} "
355381 )
356- print (f" - Current (active): { len (current_data )} " )
382+ console . print (f" [dim] Current (active):[/dim] { len (current_data )} " )
357383
358384 return merged
359385
@@ -377,7 +403,7 @@ def fetch_workspaces(
377403 list[dict[str, Any]]
378404 List of workspace objects with builds, usage hours, active hours, and team data
379405 """
380- print ("Fetching workspaces from Coder..." )
406+ console . print ("[cyan] Fetching workspaces from Coder...[/cyan] " )
381407 workspaces = run_command (["coder" , "list" , "-a" , "-o" , "json" ])
382408
383409 # Teams to exclude from analytics
@@ -400,16 +426,16 @@ def fetch_workspaces(
400426
401427 filtered_count = original_count - len (filtered_workspaces )
402428 if filtered_count > 0 :
403- print (
404- f"✓ Filtered out { filtered_count } workspaces from excluded teams: { ', ' .join (excluded_teams )} "
429+ console . print (
430+ f"[green]✓[/green] Filtered out { filtered_count } workspaces from excluded teams: { ', ' .join (excluded_teams )} "
405431 )
406432
407- print (f"✓ Fetched { len (filtered_workspaces )} workspaces" )
433+ console . print (f"[green]✓[/green] Fetched { len (filtered_workspaces )} workspaces" )
408434
409435 # Fetch user activity insights (active hours)
410436 # Use a wide time range to capture all activity
411437 # Find earliest workspace creation date
412- print ("Fetching user activity insights..." )
438+ console . print ("[cyan] Fetching user activity insights...[/cyan] " )
413439 earliest_created = min (
414440 (
415441 datetime .fromisoformat (ws .get ("created_at" , "" ).replace ("Z" , "+00:00" ))
@@ -431,10 +457,14 @@ def fetch_workspaces(
431457 activity_map = fetch_user_activity_insights (
432458 api_url , session_token , start_time , end_time
433459 )
434- print (f"✓ Fetched activity data for { len (activity_map )} users" )
460+ console .print (
461+ f"[green]✓[/green] Fetched activity data for { len (activity_map )} users"
462+ )
435463
436464 # Enrich workspaces with full build history and usage hours
437- print ("Enriching workspaces with build history and active hours..." )
465+ console .print (
466+ "[cyan]Enriching workspaces with build history and active hours...[/cyan]"
467+ )
438468 for i , workspace in enumerate (filtered_workspaces , 1 ):
439469 workspace_id = workspace .get ("id" )
440470 if workspace_id :
@@ -463,17 +493,19 @@ def fetch_workspaces(
463493
464494 # Progress indicator
465495 if i % 10 == 0 :
466- print (f" Processed { i } /{ len (filtered_workspaces )} workspaces..." )
496+ console .print (
497+ f" [dim]Processed { i } /{ len (filtered_workspaces )} workspaces...[/dim]"
498+ )
467499
468- print (
469- f"✓ Enriched { len (filtered_workspaces )} workspaces with build history and active hours"
500+ console . print (
501+ f"[green]✓[/green] Enriched { len (filtered_workspaces )} workspaces with build history and active hours"
470502 )
471503 return filtered_workspaces
472504
473505
474506def fetch_templates () -> list [dict [str , Any ]]:
475507 """Fetch all templates using Coder CLI."""
476- print ("Fetching templates from Coder..." )
508+ console . print ("[cyan] Fetching templates from Coder...[/cyan] " )
477509 templates_raw = run_command (["coder" , "templates" , "list" , "-o" , "json" ])
478510
479511 # Unwrap the "Template" object from each item
@@ -487,7 +519,7 @@ def fetch_templates() -> list[dict[str, Any]]:
487519 # Filter out kubernetes-gpu template
488520 templates = [t for t in templates if t .get ("name" ) != "kubernetes-gpu" ]
489521
490- print (f"✓ Fetched { len (templates )} templates" )
522+ console . print (f"[green]✓[/green] Fetched { len (templates )} templates" )
491523 return templates
492524
493525
@@ -503,7 +535,7 @@ def create_snapshot(
503535 "templates" : templates ,
504536 }
505537
506- print (f"✓ Created snapshot at { timestamp } " )
538+ console . print (f"[green]✓[/green] Created snapshot at { timestamp } " )
507539 return snapshot
508540
509541
@@ -513,24 +545,26 @@ def ensure_bucket_exists(bucket_name: str) -> storage.Bucket:
513545
514546 try :
515547 bucket = client .get_bucket (bucket_name )
516- print (f"✓ Bucket '{ bucket_name } ' exists" )
548+ console . print (f"[green]✓[/green] Bucket '{ bucket_name } ' exists" )
517549 return bucket
518550 except Exception :
519- print (f"Bucket '{ bucket_name } ' doesn't exist, creating..." )
551+ console . print (f"[cyan] Bucket '{ bucket_name } ' doesn't exist, creating...[/cyan] " )
520552 bucket = client .create_bucket (bucket_name )
521553
522554 # Set lifecycle policy to delete objects older than 90 days
523555 lifecycle_rule = {"action" : {"type" : "Delete" }, "condition" : {"age" : 90 }}
524556 bucket .lifecycle_rules = [lifecycle_rule ]
525557 bucket .patch ()
526558
527- print (f"✓ Created bucket '{ bucket_name } ' with 90-day lifecycle policy" )
559+ console .print (
560+ f"[green]✓[/green] Created bucket '{ bucket_name } ' with 90-day lifecycle policy"
561+ )
528562 return bucket
529563
530564
531565def upload_to_gcs (snapshot : dict [str , Any ], bucket_name : str ) -> None :
532566 """Upload snapshot to GCS bucket."""
533- print (f"Uploading snapshot to gs://{ bucket_name } /..." )
567+ console . print (f"[cyan] Uploading snapshot to gs://{ bucket_name } /...[/cyan] " )
534568
535569 # Ensure bucket exists
536570 bucket = ensure_bucket_exists (bucket_name )
@@ -544,14 +578,14 @@ def upload_to_gcs(snapshot: dict[str, Any], bucket_name: str) -> None:
544578
545579 blob = bucket .blob (timestamp_filename )
546580 blob .upload_from_string (snapshot_json , content_type = "application/json" )
547- print (f"✓ Uploaded to { timestamp_filename } " )
581+ console . print (f"[green]✓[/green] Uploaded to { timestamp_filename } " )
548582
549583 # Update latest.json
550584 latest_blob = bucket .blob ("latest.json" )
551585 latest_blob .upload_from_string (snapshot_json , content_type = "application/json" )
552- print ("✓ Updated latest.json" )
586+ console . print ("[green]✓[/green] Updated latest.json" )
553587
554- print ("\n ✓ Successfully uploaded snapshot to GCS" )
588+ console . print ("\n [green] ✓ Successfully uploaded snapshot to GCS[/green] " )
555589
556590
557591def save_local_copy (
@@ -560,23 +594,23 @@ def save_local_copy(
560594 """Save a local copy of the snapshot for debugging."""
561595 with open (output_path , "w" ) as f :
562596 json .dump (snapshot , f , indent = 2 )
563- print (f"✓ Saved local copy to { output_path } " )
597+ console . print (f"[green]✓[/green] Saved local copy to { output_path } " )
564598
565599
566600def main () -> None :
567601 """Execute the main workflow."""
568- print ("=" * 60 )
569- print ("Coder Analytics Collection Script" )
570- print ("=" * 60 )
571- print ()
602+ console . print ("[bold cyan]" + " =" * 60 + "[/bold cyan]" )
603+ console . print ("[bold cyan] Coder Analytics Collection Script[/bold cyan] " )
604+ console . print ("[bold cyan]" + " =" * 60 + "[/bold cyan]" )
605+ console . print ()
572606
573607 # Configuration
574608 bucket_name = "coder-analytics-snapshots"
575609 save_local = "--local" in sys .argv
576610
577611 # Get Coder API configuration
578612 api_url , session_token = get_coder_api_config ()
579- print (f"✓ Using Coder API: { api_url } " )
613+ console . print (f"[green]✓[/green] Using Coder API: { api_url } " )
580614
581615 # Fetch participant data from multiple sources and merge
582616 # Historical data preserves team assignments for deleted participants
@@ -598,17 +632,17 @@ def main() -> None:
598632 # Upload to GCS
599633 upload_to_gcs (snapshot , bucket_name )
600634
601- print ()
602- print ("=" * 60 )
603- print ("✓ Collection complete!" )
604- print ("=" * 60 )
635+ console . print ()
636+ console . print ("[bold green]" + " =" * 60 + "[/bold green]" )
637+ console . print ("[bold green] ✓ Collection complete![/bold green] " )
638+ console . print ("[bold green]" + " =" * 60 + "[/bold green]" )
605639
606640 # Print summary
607- print ("\n Summary: " )
608- print (f" Workspaces: { len (workspaces )} " )
609- print (f" Templates: { len (templates )} " )
610- print (f" Timestamp: { snapshot ['timestamp' ]} " )
611- print (f" Bucket: gs://{ bucket_name } /" )
641+ console . print ("\n [bold]Summary:[/bold] " )
642+ console . print (f" [cyan] Workspaces:[/cyan] { len (workspaces )} " )
643+ console . print (f" [cyan] Templates:[/cyan] { len (templates )} " )
644+ console . print (f" [cyan] Timestamp:[/cyan] { snapshot ['timestamp' ]} " )
645+ console . print (f" [cyan] Bucket:[/cyan] gs://{ bucket_name } /" )
612646
613647
614648if __name__ == "__main__" :
0 commit comments