Skip to content

Commit 8302dbc

Browse files
authored
Merge pull request #2128 from Esri/NA/guides_11
Update several samples for 2.4 syntax
2 parents 3285c2e + 5c18da5 commit 8302dbc

File tree

3 files changed

+78
-415
lines changed

3 files changed

+78
-415
lines changed

samples/04_gis_analysts_data_scientists/solar-energy-prediction-using-weather-variables.ipynb

Lines changed: 30 additions & 21 deletions
Large diffs are not rendered by default.

samples/04_gis_analysts_data_scientists/traffic-light-detection-on-oriented-imagery.ipynb

Lines changed: 14 additions & 115 deletions
Original file line numberDiff line numberDiff line change
@@ -161,24 +161,7 @@
161161
"outputs": [
162162
{
163163
"data": {
164-
"application/javascript": [
165-
"\n",
166-
" setTimeout(function() {\n",
167-
" var nbb_cell_id = 6;\n",
168-
" var nbb_unformatted_code = \"# filepath = oriented_imagery_data.download(save_path = os.getcwd(), file_name=oriented_imagery_data.name)\\nfilepath = \\\"D:\\\\TrafficSignalDataset\\\\sample\\\\sample\\\\oriented_imagery_sample_notebook.zip\\\"\";\n",
169-
" var nbb_formatted_code = \"# filepath = oriented_imagery_data.download(save_path = os.getcwd(), file_name=oriented_imagery_data.name)\\nfilepath = \\\"D:\\\\TrafficSignalDataset\\\\sample\\\\sample\\\\oriented_imagery_sample_notebook.zip\\\"\";\n",
170-
" var nbb_cells = Jupyter.notebook.get_cells();\n",
171-
" for (var i = 0; i < nbb_cells.length; ++i) {\n",
172-
" if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n",
173-
" if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n",
174-
" nbb_cells[i].set_text(nbb_formatted_code);\n",
175-
" }\n",
176-
" break;\n",
177-
" }\n",
178-
" }\n",
179-
" }, 500);\n",
180-
" "
181-
],
164+
"application/javascript": "\n setTimeout(function() {\n var nbb_cell_id = 6;\n var nbb_unformatted_code = \"# filepath = oriented_imagery_data.download(save_path = os.getcwd(), file_name=oriented_imagery_data.name)\\nfilepath = \\\"D:\\\\TrafficSignalDataset\\\\sample\\\\sample\\\\oriented_imagery_sample_notebook.zip\\\"\";\n var nbb_formatted_code = \"# filepath = oriented_imagery_data.download(save_path = os.getcwd(), file_name=oriented_imagery_data.name)\\nfilepath = \\\"D:\\\\TrafficSignalDataset\\\\sample\\\\sample\\\\oriented_imagery_sample_notebook.zip\\\"\";\n var nbb_cells = Jupyter.notebook.get_cells();\n for (var i = 0; i < nbb_cells.length; ++i) {\n if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n nbb_cells[i].set_text(nbb_formatted_code);\n }\n break;\n }\n }\n }, 500);\n ",
182165
"text/plain": [
183166
"<IPython.core.display.Javascript object>"
184167
]
@@ -221,24 +204,7 @@
221204
"outputs": [
222205
{
223206
"data": {
224-
"application/javascript": [
225-
"\n",
226-
" setTimeout(function() {\n",
227-
" var nbb_cell_id = 7;\n",
228-
" var nbb_unformatted_code = \"data_path = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"street_view_data\\\")\\nimage_meta_data = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"oriented_imagery_meta_data.csv\\\")\\ndepth_image_path = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"saved_depth_image\\\")\";\n",
229-
" var nbb_formatted_code = \"data_path = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"street_view_data\\\")\\nimage_meta_data = Path(\\n os.path.join(os.path.splitext(filepath)[0]), \\\"oriented_imagery_meta_data.csv\\\"\\n)\\ndepth_image_path = Path(\\n os.path.join(os.path.splitext(filepath)[0]), \\\"saved_depth_image\\\"\\n)\";\n",
230-
" var nbb_cells = Jupyter.notebook.get_cells();\n",
231-
" for (var i = 0; i < nbb_cells.length; ++i) {\n",
232-
" if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n",
233-
" if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n",
234-
" nbb_cells[i].set_text(nbb_formatted_code);\n",
235-
" }\n",
236-
" break;\n",
237-
" }\n",
238-
" }\n",
239-
" }, 500);\n",
240-
" "
241-
],
207+
"application/javascript": "\n setTimeout(function() {\n var nbb_cell_id = 7;\n var nbb_unformatted_code = \"data_path = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"street_view_data\\\")\\nimage_meta_data = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"oriented_imagery_meta_data.csv\\\")\\ndepth_image_path = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"saved_depth_image\\\")\";\n var nbb_formatted_code = \"data_path = Path(os.path.join(os.path.splitext(filepath)[0]), \\\"street_view_data\\\")\\nimage_meta_data = Path(\\n os.path.join(os.path.splitext(filepath)[0]), \\\"oriented_imagery_meta_data.csv\\\"\\n)\\ndepth_image_path = Path(\\n os.path.join(os.path.splitext(filepath)[0]), \\\"saved_depth_image\\\"\\n)\";\n var nbb_cells = Jupyter.notebook.get_cells();\n for (var i = 0; i < nbb_cells.length; ++i) {\n if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n nbb_cells[i].set_text(nbb_formatted_code);\n }\n break;\n }\n }\n }, 500);\n ",
242208
"text/plain": [
243209
"<IPython.core.display.Javascript object>"
244210
]
@@ -261,24 +227,7 @@
261227
"outputs": [
262228
{
263229
"data": {
264-
"application/javascript": [
265-
"\n",
266-
" setTimeout(function() {\n",
267-
" var nbb_cell_id = 8;\n",
268-
" var nbb_unformatted_code = \"image_path_list = [os.path.join(data_path,image) for image in os.listdir(data_path)]\";\n",
269-
" var nbb_formatted_code = \"image_path_list = [os.path.join(data_path, image) for image in os.listdir(data_path)]\";\n",
270-
" var nbb_cells = Jupyter.notebook.get_cells();\n",
271-
" for (var i = 0; i < nbb_cells.length; ++i) {\n",
272-
" if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n",
273-
" if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n",
274-
" nbb_cells[i].set_text(nbb_formatted_code);\n",
275-
" }\n",
276-
" break;\n",
277-
" }\n",
278-
" }\n",
279-
" }, 500);\n",
280-
" "
281-
],
230+
"application/javascript": "\n setTimeout(function() {\n var nbb_cell_id = 8;\n var nbb_unformatted_code = \"image_path_list = [os.path.join(data_path,image) for image in os.listdir(data_path)]\";\n var nbb_formatted_code = \"image_path_list = [os.path.join(data_path, image) for image in os.listdir(data_path)]\";\n var nbb_cells = Jupyter.notebook.get_cells();\n for (var i = 0; i < nbb_cells.length; ++i) {\n if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n nbb_cells[i].set_text(nbb_formatted_code);\n }\n break;\n }\n }\n }, 500);\n ",
282231
"text/plain": [
283232
"<IPython.core.display.Javascript object>"
284233
]
@@ -315,24 +264,7 @@
315264
"outputs": [
316265
{
317266
"data": {
318-
"application/javascript": [
319-
"\n",
320-
" setTimeout(function() {\n",
321-
" var nbb_cell_id = 9;\n",
322-
" var nbb_unformatted_code = \"yolo = YOLOv3(pretrained_backbone=True)\";\n",
323-
" var nbb_formatted_code = \"yolo = YOLOv3(pretrained_backbone=True)\";\n",
324-
" var nbb_cells = Jupyter.notebook.get_cells();\n",
325-
" for (var i = 0; i < nbb_cells.length; ++i) {\n",
326-
" if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n",
327-
" if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n",
328-
" nbb_cells[i].set_text(nbb_formatted_code);\n",
329-
" }\n",
330-
" break;\n",
331-
" }\n",
332-
" }\n",
333-
" }, 500);\n",
334-
" "
335-
],
267+
"application/javascript": "\n setTimeout(function() {\n var nbb_cell_id = 9;\n var nbb_unformatted_code = \"yolo = YOLOv3(pretrained_backbone=True)\";\n var nbb_formatted_code = \"yolo = YOLOv3(pretrained_backbone=True)\";\n var nbb_cells = Jupyter.notebook.get_cells();\n for (var i = 0; i < nbb_cells.length; ++i) {\n if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n nbb_cells[i].set_text(nbb_formatted_code);\n }\n break;\n }\n }\n }, 500);\n ",
336268
"text/plain": [
337269
"<IPython.core.display.Javascript object>"
338270
]
@@ -373,24 +305,7 @@
373305
"outputs": [
374306
{
375307
"data": {
376-
"application/javascript": [
377-
"\n",
378-
" setTimeout(function() {\n",
379-
" var nbb_cell_id = 10;\n",
380-
" var nbb_unformatted_code = \"def traffic_light_finder(oriented_image_path):\\n flag = 0\\n coordlist = []\\n temp_list = {}\\n out = yolo.predict(oriented_image_path, threshold=0.5)\\n test_img = cv2.imread(oriented_image_path)\\n if len(out[0]) == 0:\\n temp_list[\\\"object\\\"] = False\\n else:\\n for index, (value, label, confidence) in enumerate(zip(out[0], out[1], out[2])):\\n if label == \\\"traffic light\\\":\\n flag = 1\\n coordlist.append(\\n [int(value[0]), int(value[1]), int(value[2]), int(value[3])]\\n )\\n test_img = cv2.rectangle(\\n test_img,\\n (int(value[0]), int(value[1]), int(value[2]), int(value[3])),\\n (0, 0, 255),\\n 10,\\n )\\n textvalue = label + \\\"_\\\" + str(confidence)\\n cv2.putText(\\n test_img,\\n textvalue,\\n (int(value[0]), int(value[1]) - 10),\\n cv2.FONT_HERSHEY_SIMPLEX,\\n 1.5,\\n (0, 0, 255),\\n 2,\\n )\\n if flag == 1:\\n temp_list[\\\"object\\\"] = True\\n temp_list[\\\"coords\\\"] = coordlist\\n temp_list[\\\"assetname\\\"] = \\\"traffic light\\\"\\n return temp_list, test_img\";\n",
381-
" var nbb_formatted_code = \"def traffic_light_finder(oriented_image_path):\\n flag = 0\\n coordlist = []\\n temp_list = {}\\n out = yolo.predict(oriented_image_path, threshold=0.5)\\n test_img = cv2.imread(oriented_image_path)\\n if len(out[0]) == 0:\\n temp_list[\\\"object\\\"] = False\\n else:\\n for index, (value, label, confidence) in enumerate(zip(out[0], out[1], out[2])):\\n if label == \\\"traffic light\\\":\\n flag = 1\\n coordlist.append(\\n [int(value[0]), int(value[1]), int(value[2]), int(value[3])]\\n )\\n test_img = cv2.rectangle(\\n test_img,\\n (int(value[0]), int(value[1]), int(value[2]), int(value[3])),\\n (0, 0, 255),\\n 10,\\n )\\n textvalue = label + \\\"_\\\" + str(confidence)\\n cv2.putText(\\n test_img,\\n textvalue,\\n (int(value[0]), int(value[1]) - 10),\\n cv2.FONT_HERSHEY_SIMPLEX,\\n 1.5,\\n (0, 0, 255),\\n 2,\\n )\\n if flag == 1:\\n temp_list[\\\"object\\\"] = True\\n temp_list[\\\"coords\\\"] = coordlist\\n temp_list[\\\"assetname\\\"] = \\\"traffic light\\\"\\n return temp_list, test_img\";\n",
382-
" var nbb_cells = Jupyter.notebook.get_cells();\n",
383-
" for (var i = 0; i < nbb_cells.length; ++i) {\n",
384-
" if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n",
385-
" if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n",
386-
" nbb_cells[i].set_text(nbb_formatted_code);\n",
387-
" }\n",
388-
" break;\n",
389-
" }\n",
390-
" }\n",
391-
" }, 500);\n",
392-
" "
393-
],
308+
"application/javascript": "\n setTimeout(function() {\n var nbb_cell_id = 10;\n var nbb_unformatted_code = \"def traffic_light_finder(oriented_image_path):\\n flag = 0\\n coordlist = []\\n temp_list = {}\\n out = yolo.predict(oriented_image_path, threshold=0.5)\\n test_img = cv2.imread(oriented_image_path)\\n if len(out[0]) == 0:\\n temp_list[\\\"object\\\"] = False\\n else:\\n for index, (value, label, confidence) in enumerate(zip(out[0], out[1], out[2])):\\n if label == \\\"traffic light\\\":\\n flag = 1\\n coordlist.append(\\n [int(value[0]), int(value[1]), int(value[2]), int(value[3])]\\n )\\n test_img = cv2.rectangle(\\n test_img,\\n (int(value[0]), int(value[1]), int(value[2]), int(value[3])),\\n (0, 0, 255),\\n 10,\\n )\\n textvalue = label + \\\"_\\\" + str(confidence)\\n cv2.putText(\\n test_img,\\n textvalue,\\n (int(value[0]), int(value[1]) - 10),\\n cv2.FONT_HERSHEY_SIMPLEX,\\n 1.5,\\n (0, 0, 255),\\n 2,\\n )\\n if flag == 1:\\n temp_list[\\\"object\\\"] = True\\n temp_list[\\\"coords\\\"] = coordlist\\n temp_list[\\\"assetname\\\"] = \\\"traffic light\\\"\\n return temp_list, test_img\";\n var nbb_formatted_code = \"def traffic_light_finder(oriented_image_path):\\n flag = 0\\n coordlist = []\\n temp_list = {}\\n out = yolo.predict(oriented_image_path, threshold=0.5)\\n test_img = cv2.imread(oriented_image_path)\\n if len(out[0]) == 0:\\n temp_list[\\\"object\\\"] = False\\n else:\\n for index, (value, label, confidence) in enumerate(zip(out[0], out[1], out[2])):\\n if label == \\\"traffic light\\\":\\n flag = 1\\n coordlist.append(\\n [int(value[0]), int(value[1]), int(value[2]), int(value[3])]\\n )\\n test_img = cv2.rectangle(\\n test_img,\\n (int(value[0]), int(value[1]), int(value[2]), int(value[3])),\\n (0, 0, 255),\\n 10,\\n )\\n textvalue = label + \\\"_\\\" + str(confidence)\\n cv2.putText(\\n test_img,\\n textvalue,\\n (int(value[0]), int(value[1]) - 10),\\n cv2.FONT_HERSHEY_SIMPLEX,\\n 1.5,\\n (0, 0, 255),\\n 2,\\n )\\n if flag == 1:\\n temp_list[\\\"object\\\"] = True\\n temp_list[\\\"coords\\\"] = coordlist\\n temp_list[\\\"assetname\\\"] = \\\"traffic light\\\"\\n return temp_list, test_img\";\n var nbb_cells = Jupyter.notebook.get_cells();\n for (var i = 0; i < nbb_cells.length; ++i) {\n if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n nbb_cells[i].set_text(nbb_formatted_code);\n }\n break;\n }\n }\n }, 500);\n ",
394309
"text/plain": [
395310
"<IPython.core.display.Javascript object>"
396311
]
@@ -500,24 +415,7 @@
500415
"outputs": [
501416
{
502417
"data": {
503-
"application/javascript": [
504-
"\n",
505-
" setTimeout(function() {\n",
506-
" var nbb_cell_id = 14;\n",
507-
" var nbb_unformatted_code = \"with open('traffic_light_data_sample.json', 'w') as f:\\n json.dump(data_list, f)\";\n",
508-
" var nbb_formatted_code = \"with open(\\\"traffic_light_data_sample.json\\\", \\\"w\\\") as f:\\n json.dump(data_list, f)\";\n",
509-
" var nbb_cells = Jupyter.notebook.get_cells();\n",
510-
" for (var i = 0; i < nbb_cells.length; ++i) {\n",
511-
" if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n",
512-
" if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n",
513-
" nbb_cells[i].set_text(nbb_formatted_code);\n",
514-
" }\n",
515-
" break;\n",
516-
" }\n",
517-
" }\n",
518-
" }, 500);\n",
519-
" "
520-
],
418+
"application/javascript": "\n setTimeout(function() {\n var nbb_cell_id = 14;\n var nbb_unformatted_code = \"with open('traffic_light_data_sample.json', 'w') as f:\\n json.dump(data_list, f)\";\n var nbb_formatted_code = \"with open(\\\"traffic_light_data_sample.json\\\", \\\"w\\\") as f:\\n json.dump(data_list, f)\";\n var nbb_cells = Jupyter.notebook.get_cells();\n for (var i = 0; i < nbb_cells.length; ++i) {\n if (nbb_cells[i].input_prompt_number == nbb_cell_id) {\n if (nbb_cells[i].get_text() == nbb_unformatted_code) {\n nbb_cells[i].set_text(nbb_formatted_code);\n }\n break;\n }\n }\n }, 500);\n ",
521419
"text/plain": [
522420
"<IPython.core.display.Javascript object>"
523421
]
@@ -1118,7 +1016,7 @@
11181016
"m.center = {'x': 25.28489583988743, 'y': 54.70681816057357,\n",
11191017
" 'spatialReference': {'wkid': 4326, 'latestWkid': 4326}}\n",
11201018
"m.zoom = 19\n",
1121-
"m.basemap = 'satellite'"
1019+
"m.basemap.basemap = 'satellite'"
11221020
]
11231021
},
11241022
{
@@ -1128,16 +1026,17 @@
11281026
"metadata": {},
11291027
"outputs": [],
11301028
"source": [
1029+
"from arcgis.map.symbols import SimpleMarkerSymbolEsriSMS\n",
1030+
"\n",
11311031
"for point in outpoints:\n",
11321032
" intpoint = {'x': point[0], 'y': point[1],\n",
11331033
" 'spatialReference': {'wkid': 102100,\n",
11341034
" 'latestWkid': 3857}}\n",
1135-
" m.draw(arcgis.geometry.Point(intpoint), symbol={\n",
1136-
" 'type': 'simple-marker',\n",
1137-
" 'style': 'square',\n",
1138-
" 'color': 'red',\n",
1139-
" 'size': '8px',\n",
1140-
" })"
1035+
" m.content.draw(arcgis.geometry.Point(intpoint), symbol=SimpleMarkerSymbolEsriSMS(**{\n",
1036+
" 'style': 'esriSMSSquare',\n",
1037+
" 'color': [255,0,0],\n",
1038+
" 'size': 8,\n",
1039+
" }))"
11411040
]
11421041
},
11431042
{

0 commit comments

Comments (0)