11#!/usr/bin/python3
22
33#
4- # Copyright 2018 Southern California Linux Expo
4+ # Copyright 2018-present Southern California Linux Expo
55#
66# Licensed under the Apache License, Version 2.0 (the "License");
77# you may not use this file except in compliance with the License.
1818
1919#
2020# Author:: Phil Dibowitz <phil@ipm.com>
21- # This is quick-n-dirty script to import a CSV export from the SCALE
22- # website into Guidebook. By default it'll add only what's missing, but
23- # can optionally update all existing sessions.
21+ #
22+ # Script to sync the website schedule to Guidebook complete with region
23+ # mapping.
24+ #
25+ # By default it'll add only what's missing, but can optionally update all
26+ # existing sessions.
2427#
2528# It automatically setups rooms ("Locations") and tracks. It has a hard-coded
2629# map of colors in the Guidebook class, so if you change tracks you'll need
2730# to update that.
2831#
2932
33+ from datetime import datetime
3034import click
31- import csv
35+ import json
3236import logging
37+ import os
38+ import pytz
39+ import re
3340import requests
3441import sys
35- import pytz
36- from datetime import datetime
3742
38- DBASE_DEFAULT = "/tmp/presentation_exporter_event_1967.csv "
43+ DBASE_DEFAULT = "https://www.socallinuxexpo.org/scale/23x/app "
3944GUIDE_NAME = "SCaLE 23x"
4045
4146
42- class OurCSV :
47+ class OurJSON :
4348 rooms = set ()
4449 tracks = set ()
4550 sessions = set ()
4651
4752 FIELD_MAPPING = {
48- "tracks" : "Session Track" ,
49- "rooms" : "Room/ Location" ,
53+ "tracks" : "Track" ,
54+ "rooms" : "Location" ,
5055 }
5156
52- def __init__ (self , dbase , logger ):
57+ def __init__ (self , path , logger ):
5358 self .logger = logger
54- self .sessions = self .load_csv (dbase )
59+ if path .startswith ("http://" ) or path .startswith ("https://" ):
60+ response = requests .get (path )
61+ blob = response .text
62+ self .sessions = self .load_json (blob )
63+ else :
64+ blob = open (path , "r" ).read ()
64+ self .sessions = self .load_json (blob )
5566
56- def load_csv (self , filename ):
57- self .logger .info ("Loading CSV file" )
67+ def load_json (self , raw ):
68+ self .logger .info ("Loading JSON file" )
69+ raw = json .loads (raw )
5870 data = []
59- with open (filename , "r" , encoding = "utf-8" ) as csvfile :
60- reader = csv .DictReader (csvfile , delimiter = "," , quotechar = '"' )
61- for row in reader :
62- track = row [self .FIELD_MAPPING ["tracks" ]]
63- room = row [self .FIELD_MAPPING ["rooms" ]]
64- if track != "" :
65- self .tracks .add (track )
66- if room != "" :
67- self .rooms .add (room )
68- data .append (row )
71+ for session in raw :
72+ track = session [self .FIELD_MAPPING ["tracks" ]].strip ()
73+ room = session [self .FIELD_MAPPING ["rooms" ]].strip ()
74+ if track != "" :
75+ self .tracks .add (track )
76+ if room != "" :
77+ self .rooms .add (room )
78+ clean_session = {k : v .strip () for k , v in session .items ()}
79+ data .append (clean_session )
6980 return data
7081
7182
@@ -366,28 +377,29 @@ def setup_x_map_regions(self):
366377
367378 self .add_x_map_region (map_region , update , rid , location_id )
368379
369- def to_utc (self , ts ):
370- loc_dt = datetime .strptime (ts , "%Y-%m-%d %H:%M" )
371- pt_dt = pytz .timezone ("America/Los_Angeles" ).localize (loc_dt )
380+ def to_utc (self , ts , fmt ):
381+ loc_dt = datetime .strptime (ts , fmt )
382+ if not fmt .endswith ("%z" ):
383+ pt_dt = pytz .timezone ("America/Los_Angeles" ).localize (loc_dt )
384+ else :
385+ pt_dt = loc_dt
372386 return pt_dt .astimezone (pytz .utc )
373387
374388 def get_times (self , session ):
375389 """
376390 Helper function to build times for guidebook.
377391 """
378- d = session ["Date" ].split ()[1 ]
379- month , date , year = d .split ("/" )
380-
381- start_ts = "%s-%s-%s %s" % (year , month , date , session ["Time Start" ])
382392
383- end_ts = "%s-%s-%s %s" % (year , month , date , session ["Time End" ])
384- return (self .to_utc (start_ts ), self .to_utc (end_ts ))
393+ fmt = "%Y-%m-%dT%H:%M:%S%z"
394+ start_ts = session ["StartTime" ]
395+ end_ts = session ["EndTime" ]
396+ return (self .to_utc (start_ts , fmt ), self .to_utc (end_ts , fmt ))
385397
386398 def get_id (self , thing , session ):
387399 """
388400 Get the ID for <thing> where thing is a room or track
389401 """
390- key = OurCSV .FIELD_MAPPING [thing ]
402+ key = OurJSON .FIELD_MAPPING [thing ]
391403 if session [key ] == "" :
392404 return []
393405 self .logger .debug (
@@ -410,14 +422,14 @@ def add_session(self, session, update, sid=None):
410422 """
411423 if update and not self .update :
412424 return
413- name = session ["Session Title " ]
425+ name = session ["Name " ]
414426 start , end = self .get_times (session )
415427 data = {
416428 "name" : name ,
417429 "start_time" : start ,
418430 "end_time" : end ,
419431 "guide" : self .guide ,
420- "description_html" : "<p>%s</p>" % session ["Description " ],
432+ "description_html" : "<p>%s</p>" % session ["LongAbstract " ],
421433 "schedule_tracks" : self .get_id ("tracks" , session ),
422434 "locations" : self .get_id ("rooms" , session ),
423435 "add_to_schedule" : True ,
@@ -432,10 +444,10 @@ def setup_sessions(self, sessions):
432444 Add all rooms passed in if missing.
433445 """
434446 for session in sessions :
435- name = session ["Session Title " ]
447+ name = session ["Name " ]
436448 update = False
437449 sid = None
438- if session ["Date " ] == "" :
450+ if session ["StartTime " ] == "" :
439451 self .logger .warning ("Skipping %s - no date" % name )
440452 continue
441453 if name in self .sessions :
@@ -495,20 +507,24 @@ def delete_all(self):
495507 self .delete_rooms ()
496508
497509 def publish_updates (self ):
510+ self .logger .info ("Publishing changes" )
498511 response = requests .post (
499512 self .URLS ["publish" ].format (guide = self .guide ),
500513 headers = self .x_headers ,
501514 )
502515
503- if response .status_code == 204 :
516+ if response .status_code == 202 :
517+ self .logger .debug ("Publish accepted" )
504518 return
505519
506- if (
507- response .status_code == 403
508- and "no new content" in resp .text .lower ()
509- ):
510- self .logger .info ("No changes to publish" )
511- return
520+ if response .status_code == 403 :
521+ resp_text = response .text .lower ()
522+ if "no new content" in resp_text :
523+ self .logger .debug ("No changes to publish" )
524+ return
525+ elif "currently publishing" in resp_text :
526+ self .logger .debug ("Guidebook is already publishing" )
527+ return
512528
513529 self .logger .error ("Failed to publish" )
514530 self .logger .error ("Status: %s" % response .status_code )
@@ -531,20 +547,24 @@ def publish_updates(self):
531547 default = False ,
532548 help = "Delete all tracks, rooms, and sessions" ,
533549)
534- @click .option ("--csv-file" , default = DBASE_DEFAULT , help = "CSV file to use." )
550+ @click .option (
551+ "--feed" ,
552+ default = DBASE_DEFAULT ,
553+ help = "JSON file or http(s) URL to JSON data." ,
554+ )
535555@click .option (
536556 "--api-file" ,
537557 "-a" ,
538- default = "guidebook_api.txt " ,
558+ default = f" { os . environ [ 'HOME' ] } /.guidebook-token " ,
539559 help = "File to read API key from" ,
540560)
541561@click .option (
542562 "--x-api-file" ,
543563 "-x" ,
544- default = "guidebook_api_x.txt " ,
564+ default = f" { os . environ [ 'HOME' ] } /.guidebook-jwt-token " ,
545565 help = "File to read API key from" ,
546566)
547- def main (debug , update , delete_all , csv_file , api_file , x_api_file ):
567+ def main (debug , update , delete_all , feed , api_file , x_api_file ):
548568 level = logging .INFO
549569 if debug :
550570 level = logging .DEBUG
@@ -570,7 +590,7 @@ def main(debug, update, delete_all, csv_file, api_file, x_api_file):
570590 print ("into a schedule to lose all of that work." )
571591 click .confirm ("ARE YOU FUCKING SURE?!" , abort = True )
572592 else :
573- ourdata = OurCSV ( csv_file , logger )
593+ ourdata = OurJSON ( feed , logger )
574594
575595 ourguide = GuideBook (logger , update , key , x_key = x_key )
576596 if delete_all :
0 commit comments