    {'notebookRuntimeName': 'ArcGIS Notebook Python 3 Advanced',
     'notebookRuntimeVersion': '5.0'}
NB_ITEM_PROPERTIES_RUNTIME_STAMP_ADVANCED_GPU = \
-    {'notebookRuntimeName': 'ArcGIS Notebook Python 3 Advanced with GPU support',
-     'notebookRuntimeVersion': '5.0'}
+    {'notebookRuntimeName': 'ArcGIS Notebook Python 3 Advanced with GPU support',
+     'notebookRuntimeVersion': '5.0'}
NB_ITEM_PROPERTIES_RUNTIME_STAMP_STANDARD = \
    {'notebookRuntimeName': 'ArcGIS Notebook Python 3 Standard',
     'notebookRuntimeVersion': '5.0'}
NB_ITEM_FOLDER = "Notebook Samples"

+
def _main():
    """Parses arguments, connects to GIS, reads YAML, uploads NBs"""
    args = _parse_cmd_line_args()
@@ -59,95 +60,101 @@ def _main():
    if s.failed_uploads:
        raise Exception(f"Some uploads failed: {s.failed_uploads}")

+
def _parse_cmd_line_args():
    """Parse CMD args, returns an object instance of all user passed in args"""
-    parser = argparse.ArgumentParser(description="Takes all notebooks " \
-        "this in `gallery` directory, and will upload it to the specified " \
-        "portal/org in the right group with the right categories. " \
-        "(default is geosaurus.maps.arcgis.com, 'Esri Sample Notebooks' group)",
-        formatter_class=argparse.RawTextHelpFormatter)
+    parser = argparse.ArgumentParser(description="Takes all notebooks "
+        "this in `gallery` directory, and will upload it to the specified "
+        "portal/org in the right group with the right categories. "
+        "(default is geosaurus.maps.arcgis.com, 'Esri Sample Notebooks' group)",
+        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("--username", "-u", type=str,
-        help="Required username for the portal/org")
+        help="Required username for the portal/org")
    parser.add_argument("--password", "-p", type=str,
-        help="Required password for the portal/org")
+        help="Required password for the portal/org")
    parser.add_argument("--portal-url", "-r", type=str,
-        help="The portal to connect to (Default:geosaurus.maps.arcgis.com)",
-        default="https://geosaurus.maps.arcgis.com/")
+        help="The portal to connect to (Default:geosaurus.maps.arcgis.com)",
+        default="https://geosaurus.maps.arcgis.com/")
    parser.add_argument("--verbose", "-v", action="store_true",
-        help="Print all DEBUG log messages instead of just INFO")
+        help="Print all DEBUG log messages instead of just INFO")
    parser.add_argument("--replace-profiles", "-c", action="store_true",
-        help="Replace all profiles in notebooks with their appropriate username " \
-        "and passwords. Does this by running misc/tools/replace_profiles.py")
-    args = parser.parse_args(sys.argv[1:])  # don't use filename as 1st arg
+        help="Replace all profiles in notebooks with their appropriate username "
+        "and passwords. Does this by running misc/tools/replace_profiles.py")
+    args = parser.parse_args(sys.argv[1:])  # don't use filename as 1st arg
    return args

+
def _setup_logging(args):
    """Sets up the logging based on args"""
    if args.verbose:
        log.setLevel(logging.DEBUG)
    else:
-        log.setLevel(logging.INFO)
+        log.setLevel(logging.INFO)
    stdout_handler = logging.StreamHandler(stream=sys.stdout)
    stdout_handler.setLevel(logging.DEBUG)
    stdout_handler.setFormatter(logging.Formatter(
-        '----- %(levelname)s | ' \
-        '%(asctime)s | ' \
-        '%(filename)s line %(lineno)d' \
-        ' -----\n' \
+        '----- %(levelname)s | '
+        '%(asctime)s | '
+        '%(filename)s line %(lineno)d'
+        ' -----\n'
        '"%(message)s"'))
    log.addHandler(stdout_handler)
    log.info("Logging at level {}.".format(logging.getLevelName(log.level)))
    log.debug("args passed in => {}".format(args))

+
def _read_items_metadata_yaml():
    """Returns the items_metadata.yaml file as a dict"""
    with open(ITEMS_METADATA_YAML_PATH) as f:
        return yaml.safe_load(f)

+
def _replace_profiles():
    """Runs misc/tools/replace_profiles.py to go through each notebook in the
    repo and replace profiles with usernames/passwords
    """
    cmd = f"{sys.executable} {REPLACE_PROFILES_SCRIPT}"
    os.system(cmd)

+
class ItemsUploader:
    def __init__(self, gis, items_metadata_yaml):
        self._gis = gis
        self._items_metadata_yaml = items_metadata_yaml
        self.failed_uploads = []

-    def upload_items(self, share_after_upload=True):
+    def upload_items(self, share_after_upload=True):
        for entry in self._items_metadata_yaml["samples"] + \
-                self._items_metadata_yaml["guides"] + \
-                self._items_metadata_yaml["labs"]:
+                self._items_metadata_yaml["guides"] + \
+                self._items_metadata_yaml["labs"]:
            self._stage_and_upload_item(entry, share_after_upload)

-    def _stage_and_upload_item(self, entry, share_after_upload=True):
+    def _stage_and_upload_item(self, entry, share_after_upload=True):
        log.info(f"Uploading {entry['title']}")
        log.debug(f" sample: {entry}")
        try:
            nb_path = entry["path"]
            self._preupload_check(entry['title'], nb_path)
-            runtime_stamp = self._infer_runtime_stamp(entry.get("runtime", "standard"))
+            runtime_stamp = self._infer_runtime_stamp(
+                entry.get("runtime", "standard"))
            categories = entry.get("categories", None)
            self._stamp_file_with_runtime(nb_path, runtime_stamp)
            item_id = self._infer_item_id(entry["url"])
            item = self.update_item(
-                item_id=item_id,
-                item_type=NB_PORTAL_TYPE,
-                item_type_keywords=NB_PORTAL_TYPE_KEYWORDS,
-                title=entry['title'],
-                categories=categories,
-                snippet=entry['snippet'],
-                description=entry['description'],
-                license_info=entry['licenseInfo'],
-                tags=entry['tags'],
-                nb_path=nb_path,
-                runtime_stamp=runtime_stamp,
-                thumbnail=entry['thumbnail'])
+                item_id=item_id,
+                item_type=NB_PORTAL_TYPE,
+                item_type_keywords=NB_PORTAL_TYPE_KEYWORDS,
+                title=entry['title'],
+                categories=categories,
+                snippet=entry['snippet'],
+                description=entry['description'],
+                license_info=entry['licenseInfo'],
+                tags=entry['tags'],
+                nb_path=nb_path,
+                runtime_stamp=runtime_stamp,
+                thumbnail=entry['thumbnail'])
            if share_after_upload:
-                item.share(everyone=True)
+                item.share(everyone=True)
            item.protect()
            if categories:
                self._assign_categories_to_item(item, categories)
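
Between the hunks, most of `_main()` is not shown. For orientation only, here is a minimal sketch of how the pieces in this diff fit together: parse the arguments, connect to the portal, load the YAML, and run the uploader. The GIS connection line, the YAML path, and the variable names are illustrative assumptions, not the script's actual elided code.

# Illustrative wiring sketch; names, paths, and the GIS call are assumptions.
from arcgis.gis import GIS
import yaml

args = _parse_cmd_line_args()
_setup_logging(args)
if args.replace_profiles:
    _replace_profiles()
gis = GIS(args.portal_url, args.username, args.password)

with open("items_metadata.yaml") as f:  # placeholder path
    items_metadata = yaml.safe_load(f)

uploader = ItemsUploader(gis, items_metadata)
uploader.upload_items(share_after_upload=True)
if uploader.failed_uploads:
    raise Exception(f"Some uploads failed: {uploader.failed_uploads}")
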
@@ -171,11 +178,11 @@ def update_item(self, item_id, item_type, item_type_keywords, title, categories,
                    snippet, description, license_info, tags, nb_path,
                    runtime_stamp, thumbnail):
        """Actually uploads the notebook item to the portal"""
-        item_properties = {"title": title,
-                           "snippet": snippet,
-                           "description": description,
-                           "licenseInfo": license_info,
-                           "tags": tags,
+        item_properties = {"title": title,
+                           "snippet": snippet,
+                           "description": description,
+                           "licenseInfo": license_info,
+                           "tags": tags,
                           "properties": runtime_stamp}
        if categories:
            item_properties["categories"] = categories
@@ -189,11 +196,12 @@ def update_item(self, item_id, item_type, item_type_keywords, title, categories,
            log.debug(f'item {existing_item.homepage} exists, updating...')
            item_properties["url"] = existing_item.homepage
            existing_item.update(item_properties,
-                                 data=nb_path,
-                                 thumbnail=thumbnail)
+                                 data=nb_path,
+                                 thumbnail=thumbnail)
            resp = existing_item
        else:
-            raise Exception(f"Could not find item {item_id} to update. Failing!")
+            raise Exception(
+                f"Could not find item {item_id} to update. Failing!")
        return resp

    def _assign_categories_to_item(self, item, categories):
@@ -209,7 +217,7 @@ def _apply_html_preview_to_item(self, item, nb_path):
        json_file_name = "notebook_preview.json"
        json_file_path = os.path.join(".", json_file_name)
        with open(json_file_path, 'w') as f:
-            json.dump({"html": html_str}, f)
+            json.dump({"html": html_str}, f)

        if item.resources.list():
            item.resources.remove()
@@ -240,6 +248,7 @@ def _stamp_file_with_runtime(self, notebook_file_path, runtime_stamp):
        nb['metadata']['esriNotebookRuntime'] = runtime_stamp
        nbformat.write(nb, notebook_file_path, nbformat.NO_CONVERT)

+
if __name__ == "__main__":
    try:
        _main()
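
The last hunk touches `_stamp_file_with_runtime`, which writes one of the runtime dicts defined at the top of the file into a notebook's metadata before upload. A minimal standalone sketch of that step, assuming `nbformat` is installed; the notebook path is a placeholder.

# Standalone sketch of the runtime-stamping step; the path is a placeholder.
import nbformat

runtime_stamp = {'notebookRuntimeName': 'ArcGIS Notebook Python 3 Standard',
                 'notebookRuntimeVersion': '5.0'}

notebook_file_path = "sample.ipynb"
nb = nbformat.read(notebook_file_path, as_version=4)
# Record the runtime in the notebook's metadata, as _stamp_file_with_runtime does.
nb['metadata']['esriNotebookRuntime'] = runtime_stamp
nbformat.write(nb, notebook_file_path, nbformat.NO_CONVERT)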