Skip to content

Commit d8ebfe9

Browse files
committed
Improved python tutorials to use display2D
1 parent fb8292e commit d8ebfe9

File tree

3 files changed

+43
-121
lines changed

3 files changed

+43
-121
lines changed

doc/pages/Python-tutorial-neural-networks.dox

Lines changed: 4 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -95,17 +95,8 @@ network.setScaleFactor(1.0/255.0)
9595
tensor2seg = fast.TensorToSegmentation.create()\
9696
.connect(network)
9797

98-
# Setup visualization
99-
renderer = fast.ImageRenderer.create()\
100-
.connect(importer)
101-
102-
# Set colors for each class
103-
segRenderer = fast.SegmentationRenderer.create(colors={1: fast.Color.Red(), 2: fast.Color.Blue()})\
104-
.connect(tensor2seg)
105-
106-
fast.SimpleWindow2D.create()\
107-
.connect([renderer, segRenderer])\
108-
.run()
98+
# Display results and set colors for each class
99+
fast.display2D(importer, tensor2seg, segmentationColors={1: fast.Color.Red(), 2: fast.Color.Blue()})
109100
@endcode
110101

111102
For convenience you can also use the SegmentationNetwork process object, which extends NeuralNetwork by applying TensorToSegmentation
@@ -121,15 +112,8 @@ network = fast.SegmentationNetwork.create(fast.Config.getTestDataPath() + "Neura
121112
# Add a preprocessing step: multiply each pixel with 1/255, thus normalizing the intensity
122113
network.setScaleFactor(1.0/255.0)
123114

124-
# Setup visualization
125-
renderer = fast.ImageRenderer.create().connect(importer)
126-
127-
segRenderer = fast.SegmentationRenderer.create(colors={1: fast.Color.Red(), 2: fast.Color.Blue()})\
128-
.connect(network)
129-
130-
fast.SimpleWindow2D.create()\
131-
.connect([renderer, segRenderer])\
132-
.run()
115+
# Display results and set colors for each class
116+
fast.display2D(importer, network, segmentationColors={1: fast.Color.Red(), 2: fast.Color.Blue()})
133117
@endcode
134118

135119

doc/pages/Python-tutorial-ultrasound.dox

Lines changed: 23 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,17 @@ fast.SimpleWindow2D.create()\
4141
.run()
4242
@endcode
4343

44+
To reduce boilerplate code, you can use the [display2D](@ref shortcuts) function:
45+
46+
@code{.py}
47+
import fast
48+
49+
streamer = fast.MovieStreamer\
50+
.create(fast.Config.getTestDataPath() + "/US/sagittal_spine.avi")
51+
52+
fast.display2D(streamer)
53+
@endcode
54+
4455
@image html images/tutorials/ultrasound/read_video.jpg width=400px;
4556

4657
To get each frame from the video you can use the DataStream and a for loop.
@@ -120,12 +131,7 @@ streamer = fast.ImageFileStreamer.create(
120131
loop=True # Loop recording forever
121132
)
122133

123-
renderer = fast.ImageRenderer.create()\
124-
.connect(streamer)
125-
126-
fast.SimpleWindow2D.create()\
127-
.connect(renderer)\
128-
.run()
134+
fast.display2D(streamer)
129135
@endcode
130136

131137
@image html images/examples/python/left_ventricle.jpg width=250px;
@@ -142,7 +148,7 @@ streamer = fast.ImageFileStreamer.create(
142148
loop=True # Loop recording forever
143149
)
144150

145-
# Use SlicerWindow to display the 3D data
151+
# Use SlicerWindow to display the 3D data; you can also use the display3D function
146152
fast.SlicerWindow.create()\
147153
.connectImage(streamer)\
148154
.run()
@@ -189,15 +195,9 @@ streamer = fast.ImageFileStreamer.create(
189195
framerate=20, # Specify framerate to stream data in
190196
)
191197

192-
renderer = fast.ImageRenderer.create()\
193-
.connect(streamer)
194-
195-
# Create playback widget and connect it to the window
198+
# Create playback widget and use it in the display window
196199
widget = fast.PlaybackWidget(streamer)
197-
fast.SimpleWindow2D.create()\
198-
.connect(renderer)\
199-
.connect(widget)\
200-
.run()
200+
fast.display2D(streamer, widgets=[widget])
201201
@endcode
202202

203203
@image html images/tutorials/ultrasound/playback_widget.jpg
@@ -222,14 +222,8 @@ streamer = fast.UFFStreamer.create(
222222
scanConversionHeight=1024,
223223
)
224224

225-
renderer = fast.ImageRenderer.create()\
226-
.connect(streamer)
227-
228225
widget = fast.PlaybackWidget(streamer)
229-
fast.SimpleWindow2D.create()\
230-
.connect(renderer)\
231-
.connect(widget)\
232-
.run()
226+
fast.display2D(streamer, widgets=[widget])
233227
@endcode
234228

235229
@image html images/tutorials/ultrasound/read_uff_data.jpg width=512px;
@@ -249,14 +243,8 @@ streamer = fast.UFFStreamer.create(
249243
dynamicRange=60,
250244
)
251245

252-
renderer = fast.ImageRenderer.create()\
253-
.connect(streamer)
254-
255246
widget = fast.PlaybackWidget(streamer)
256-
fast.SimpleWindow2D.create()\
257-
.connect(renderer)\
258-
.connect(widget)\
259-
.run()
247+
fast.display2D(streamer, widgets=[widget])
260248
@endcode
261249

262250
@image html images/tutorials/ultrasound/read_uff_data_beamspace.jpg width=512px;
@@ -286,12 +274,7 @@ scan_convert = fast.ScanConverter.create(
286274
endAngle=0.785398
287275
).connect(data)
288276

289-
renderer = fast.ImageRenderer.create()\
290-
.connect(scan_convert)
291-
292-
fast.SimpleWindow2D.create()\
293-
.connect(renderer)\
294-
.run()
277+
fast.display2D(scan_convert)
295278

296279
# Visualize output of scan converter with matplotlib instead of FAST:
297280
plt.imshow(np.asarray(scan_convert.runAndGetOutputData())[..., 0], cmap='gray')
@@ -325,12 +308,7 @@ scan_convert = fast.ScanConverter.create(
325308
endAngle=0.785398
326309
).connect(envelope)
327310

328-
renderer = fast.ImageRenderer.create()\
329-
.connect(scan_convert)
330-
331-
fast.SimpleWindow2D.create()\
332-
.connect(renderer)\
333-
.run()
311+
fast.display2D(scan_convert)
334312

335313
# Visualize output of scan converter with matplotlib instead of FAST:
336314
plt.imshow(np.asarray(scan_convert.runAndGetOutputData())[..., 0], cmap='gray')
@@ -374,13 +352,7 @@ Here is an example of how to use the OpenIGTLinkStreamer in FAST to stream image
374352
import fast
375353

376354
streamer = fast.OpenIGTLinkStreamer.create("localhost", 18944)
377-
378-
renderer = fast.ImageRenderer.create()\
379-
.connect(streamer)
380-
381-
fast.SimpleWindow2D.create()\
382-
.connect(renderer)\
383-
.run()
355+
fast.display2D(streamer)
384356
@endcode
385357

386358
### Clarius
@@ -401,13 +373,7 @@ Thus you need to disable the windows firewall or add an exception in the firewal
401373
import fast
402374

403375
streamer = fast.ClariusStreamer.create("192.168.1.1", 5828)
404-
405-
renderer = fast.ImageRenderer.create()\
406-
.connect(streamer)
407-
408-
fast.SimpleWindow2D.create()\
409-
.connect(renderer)\
410-
.run()
376+
fast.display2D(streamer)
411377
@endcode
412378

413379
Ultrasound image processing
@@ -591,12 +557,7 @@ importer = fast.ImageFileStreamer.create(
591557
inverter = Inverter.create()\
592558
.connect(importer)
593559

594-
renderer = fast.ImageRenderer.create()\
595-
.connect(inverter)
596-
597-
fast.SimpleWindow2D.create()\
598-
.connect(renderer)\
599-
.run()
560+
fast.display2D(inverter)
600561
@endcode
601562

602563
@image html images/tutorials/ultrasound/python_process_object.jpg width=400px;
@@ -802,14 +763,8 @@ nlm = fast.NonLocalMeans.create(
802763
inputMultiplicationWeight=0.5,
803764
).connect(importer)
804765

805-
rendererNLM = fast.ImageRenderer.create()\
806-
.connect(nlm)
807-
808766
sliderWidget = fast.SliderWidget('Smoothing', 0.2, 0.05, 0.8, 0.05, fast.SliderCallback(lambda x: nlm.setSmoothingAmount(x)))
809-
fast.SimpleWindow2D.create()\
810-
.connect(rendererNLM)\
811-
.connect(sliderWidget)\
812-
.run()
767+
fast.display2D(nlm, widgets=[sliderWidget])
813768
@endcode
814769

815770
@image html images/tutorials/ultrasound/simple_gui.jpg width=400px;

doc/pages/Python-tutorial-wsi.dox

Lines changed: 16 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,17 @@ fast.SimpleWindow2D.create()\
4242
.run()
4343
@endcode
4444

45+
To reduce boilerplate code, you can use the [display2D](@ref shortcuts) function:
46+
47+
@code{.py}
48+
import fast
49+
50+
importer = fast.WholeSlideImageImporter\
51+
.create(fast.Config.getTestDataPath() + "/WSI/CMU-1.svs")
52+
53+
fast.display2D(imagePyramid=importer)
54+
@endcode
55+
4556
@image html images/tutorials/wsi/open_wsi.jpg width=512px;
4657

4758
Tissue segmentation
@@ -60,16 +71,7 @@ importer = fast.WholeSlideImageImporter\
6071
tissueSegmentation = fast.TissueSegmentation.create()\
6172
.connect(importer)
6273

63-
renderer = fast.ImagePyramidRenderer.create()\
64-
.connect(importer)
65-
66-
segmentationRenderer = fast.SegmentationRenderer.create()\
67-
.connect(tissueSegmentation)
68-
69-
fast.SimpleWindow2D.create()\
70-
.connect(renderer)\
71-
.connect(segmentationRenderer)\
72-
.run()
74+
fast.display2D(imagePyramid=importer, segmentation=tissueSegmentation)
7375
@endcode
7476

7577
@image html images/tutorials/wsi/tissue_segmentation.jpg width=512px;
@@ -229,16 +231,7 @@ stitcher = fast.PatchStitcher.create()\
229231
.connect(segmentation)
230232

231233
# Display the stitched segmentation results on top of the WSI
232-
renderer = fast.ImagePyramidRenderer.create()\
233-
.connect(importer)
234-
235-
segmentationRenderer = fast.SegmentationRenderer.create()\
236-
.connect(stitcher)
237-
238-
fast.SimpleWindow2D.create()\
239-
.connect(renderer)\
240-
.connect(segmentationRenderer)\
241-
.run()
234+
fast.display2D(imagePyramid=importer, segmentation=stitcher)
242235
@endcode
243236

244237
@image html images/tutorials/wsi/nuclei_segmentation_full.jpg width=400px;
@@ -299,12 +292,7 @@ import fast
299292

300293
importer = fast.TIFFImagePyramidImporter.create('segmented-nuclei.tiff')
301294

302-
segmentationRenderer = fast.SegmentationRenderer.create()\
303-
.connect(importer)
304-
305-
fast.SimpleWindow2D.create()\
306-
.connect(segmentationRenderer)\
307-
.run()
295+
fast.display2D(segmentation=importer)
308296
@endcode
309297

310298
@image html images/tutorials/wsi/nuclei_segmentation_export_import.jpg width=400px;
@@ -368,12 +356,7 @@ import fast
368356

369357
importer = fast.ImagePyramidPatchImporter.create('wsi-export-folder/')
370358

371-
renderer = fast.ImagePyramidRenderer.create()\
372-
.connect(importer)
373-
374-
fast.SimpleWindow2D.create()\
375-
.connect(renderer)\
376-
.run()
359+
fast.display2D(imagePyramid=importer)
377360
@endcode
378361

379362
Tissue micro array (TMA) extractor
@@ -388,7 +371,7 @@ import fast
388371
import matplotlib.pyplot as plt
389372

390373
importer = fast.WholeSlideImageImporter\
391-
.create('/home/smistad/data/TMA/TMA_TA407.svs')
374+
.create('TMA.tiff')
392375

393376
# Extract tissue cores at image pyramid level 1
394377
extractor = fast.TissueMicroArrayExtractor.create(level=1)\

0 commit comments

Comments
 (0)