Skip to content

Commit cf65f47

Browse files
authored
Merge pull request #210 from scipp/standard-domain-type-names
Change domain names to standard scheme
2 parents 29f1d75 + 762c57e commit cf65f47

39 files changed

+366
-359
lines changed

docs/user-guide/beer/beer_modulation_mcstas.ipynb

Lines changed: 15 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -25,13 +25,10 @@
2525
"metadata": {},
2626
"outputs": [],
2727
"source": [
28-
"import scipp as sc\n",
2928
"import scippneutron as scn\n",
3029
"\n",
3130
"from ess.beer import BeerModMcStasWorkflow, BeerModMcStasWorkflowKnownPeaks\n",
3231
"from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex, duplex_peaks_array, silicon_peaks_array\n",
33-
"from ess.reduce.nexus.types import Filename, SampleRun\n",
34-
"from ess.reduce.time_of_flight.types import DetectorTofData\n",
3532
"from ess.beer.types import *\n",
3633
"\n",
3734
"# Default bin edges for our d_hkl histograms\n",
@@ -173,7 +170,7 @@
173170
"source": [
174171
"wf = BeerModMcStasWorkflowKnownPeaks()\n",
175172
"wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n",
176-
"wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
173+
"wf.compute(RawDetector[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
177174
]
178175
},
179176
{
@@ -192,7 +189,7 @@
192189
"outputs": [],
193190
"source": [
194191
"wf[DHKLList] = silicon_peaks_array()\n",
195-
"da = wf.compute(DetectorTofData[SampleRun])\n",
192+
"da = wf.compute(TofDetector[SampleRun])\n",
196193
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
197194
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), silicon_peaks_array())"
198195
]
@@ -214,7 +211,7 @@
214211
"source": [
215212
"wf = BeerModMcStasWorkflow()\n",
216213
"wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n",
217-
"da = wf.compute(DetectorTofData[SampleRun])\n",
214+
"da = wf.compute(TofDetector[SampleRun])\n",
218215
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
219216
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), silicon_peaks_array())"
220217
]
@@ -268,7 +265,7 @@
268265
"source": [
269266
"wf = BeerModMcStasWorkflowKnownPeaks()\n",
270267
"wf[Filename[SampleRun]] = mcstas_duplex(8)\n",
271-
"wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
268+
"wf.compute(RawDetector[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
272269
]
273270
},
274271
{
@@ -287,7 +284,7 @@
287284
"outputs": [],
288285
"source": [
289286
"wf[DHKLList] = duplex_peaks_array()\n",
290-
"da = wf.compute(DetectorTofData[SampleRun])\n",
287+
"da = wf.compute(TofDetector[SampleRun])\n",
291288
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
292289
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
293290
]
@@ -309,7 +306,7 @@
309306
"source": [
310307
"wf = BeerModMcStasWorkflow()\n",
311308
"wf[Filename[SampleRun]] = mcstas_duplex(8)\n",
312-
"da = wf.compute(DetectorTofData[SampleRun])\n",
309+
"da = wf.compute(TofDetector[SampleRun])\n",
313310
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
314311
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
315312
]
@@ -363,7 +360,7 @@
363360
"source": [
364361
"wf = BeerModMcStasWorkflowKnownPeaks()\n",
365362
"wf[Filename[SampleRun]] = mcstas_duplex(9)\n",
366-
"wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
363+
"wf.compute(RawDetector[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
367364
]
368365
},
369366
{
@@ -382,7 +379,7 @@
382379
"outputs": [],
383380
"source": [
384381
"wf[DHKLList] = duplex_peaks_array()\n",
385-
"da = wf.compute(DetectorTofData[SampleRun])\n",
382+
"da = wf.compute(TofDetector[SampleRun])\n",
386383
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
387384
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
388385
]
@@ -404,7 +401,7 @@
404401
"source": [
405402
"wf = BeerModMcStasWorkflow()\n",
406403
"wf[Filename[SampleRun]] = mcstas_duplex(9)\n",
407-
"da = wf.compute(DetectorTofData[SampleRun])\n",
404+
"da = wf.compute(TofDetector[SampleRun])\n",
408405
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
409406
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
410407
]
@@ -458,7 +455,7 @@
458455
"source": [
459456
"wf = BeerModMcStasWorkflowKnownPeaks()\n",
460457
"wf[Filename[SampleRun]] = mcstas_duplex(10)\n",
461-
"wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
458+
"wf.compute(RawDetector[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
462459
]
463460
},
464461
{
@@ -477,7 +474,7 @@
477474
"outputs": [],
478475
"source": [
479476
"wf[DHKLList] = duplex_peaks_array()\n",
480-
"da = wf.compute(DetectorTofData[SampleRun])\n",
477+
"da = wf.compute(TofDetector[SampleRun])\n",
481478
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
482479
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
483480
]
@@ -499,7 +496,7 @@
499496
"source": [
500497
"wf = BeerModMcStasWorkflow()\n",
501498
"wf[Filename[SampleRun]] = mcstas_duplex(10)\n",
502-
"da = wf.compute(DetectorTofData[SampleRun])\n",
499+
"da = wf.compute(TofDetector[SampleRun])\n",
503500
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
504501
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
505502
]
@@ -553,7 +550,7 @@
553550
"source": [
554551
"wf = BeerModMcStasWorkflowKnownPeaks()\n",
555552
"wf[Filename[SampleRun]] = mcstas_duplex(16)\n",
556-
"wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
553+
"wf.compute(RawDetector[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')"
557554
]
558555
},
559556
{
@@ -572,7 +569,7 @@
572569
"outputs": [],
573570
"source": [
574571
"wf[DHKLList] = duplex_peaks_array()\n",
575-
"da = wf.compute(DetectorTofData[SampleRun])\n",
572+
"da = wf.compute(TofDetector[SampleRun])\n",
576573
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
577574
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
578575
]
@@ -594,7 +591,7 @@
594591
"source": [
595592
"wf = BeerModMcStasWorkflow()\n",
596593
"wf[Filename[SampleRun]] = mcstas_duplex(16)\n",
597-
"da = wf.compute(DetectorTofData[SampleRun])\n",
594+
"da = wf.compute(TofDetector[SampleRun])\n",
598595
"da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n",
599596
"ground_truth_peak_positions(da.hist(dspacing=dspacing, dim=da.dims).plot(), duplex_peaks_array())"
600597
]

docs/user-guide/dream/dream-advanced-powder-reduction.ipynb

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@
111111
"metadata": {},
112112
"outputs": [],
113113
"source": [
114-
"workflow.visualize(IofDspacingTwoTheta[SampleRun], graph_attr={\"rankdir\": \"LR\"})"
114+
"workflow.visualize(IntensityDspacingTwoTheta[SampleRun], graph_attr={\"rankdir\": \"LR\"})"
115115
]
116116
},
117117
{
@@ -129,7 +129,7 @@
129129
"metadata": {},
130130
"outputs": [],
131131
"source": [
132-
"grouped_dspacing = workflow.compute(IofDspacingTwoTheta[SampleRun])\n",
132+
"grouped_dspacing = workflow.compute(IntensityDspacingTwoTheta[SampleRun])\n",
133133
"grouped_dspacing"
134134
]
135135
},
@@ -212,7 +212,7 @@
212212
"metadata": {},
213213
"outputs": [],
214214
"source": [
215-
"workflow.visualize(IofDspacing[SampleRun], graph_attr={\"rankdir\": \"LR\"})"
215+
"workflow.visualize(IntensityDspacing[SampleRun], graph_attr={\"rankdir\": \"LR\"})"
216216
]
217217
},
218218
{
@@ -230,7 +230,7 @@
230230
"metadata": {},
231231
"outputs": [],
232232
"source": [
233-
"result = workflow.compute(IofDspacing[SampleRun])\n",
233+
"result = workflow.compute(IntensityDspacing[SampleRun])\n",
234234
"result.hist().plot(title=result.coords['detector'].value.capitalize())"
235235
]
236236
},
@@ -288,7 +288,7 @@
288288
"metadata": {},
289289
"outputs": [],
290290
"source": [
291-
"workflow.visualize(IofDspacing[SampleRun], graph_attr={\"rankdir\": \"LR\"})"
291+
"workflow.visualize(IntensityDspacing[SampleRun], graph_attr={\"rankdir\": \"LR\"})"
292292
]
293293
},
294294
{
@@ -306,7 +306,7 @@
306306
"metadata": {},
307307
"outputs": [],
308308
"source": [
309-
"result = workflow.compute(IofDspacing[SampleRun])\n",
309+
"result = workflow.compute(IntensityDspacing[SampleRun])\n",
310310
"result.hist().plot(title=result.coords['detector'].value.capitalize())"
311311
]
312312
},
@@ -329,8 +329,8 @@
329329
"metadata": {},
330330
"outputs": [],
331331
"source": [
332-
"intermediates = workflow.compute((CountsWavelength[SampleRun], MaskedData[SampleRun]))\n",
333-
"intermediates[CountsWavelength[SampleRun]]"
332+
"intermediates = workflow.compute((WavelengthDetector[SampleRun], CorrectedDetector[SampleRun]))\n",
333+
"intermediates[WavelengthDetector[SampleRun]]"
334334
]
335335
},
336336
{
@@ -341,7 +341,7 @@
341341
"outputs": [],
342342
"source": [
343343
"two_theta = sc.linspace(\"two_theta\", 0.8, 2.4, 301, unit=\"rad\")\n",
344-
"intermediates[MaskedData[SampleRun]].hist(two_theta=two_theta, wavelength=300).plot(\n",
344+
"intermediates[CorrectedDetector[SampleRun]].hist(two_theta=two_theta, wavelength=300).plot(\n",
345345
" norm=\"log\"\n",
346346
")"
347347
]

docs/user-guide/dream/dream-make-tof-lookup-table.ipynb

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
"source": [
2020
"import scipp as sc\n",
2121
"from ess.reduce import time_of_flight\n",
22+
"from ess.reduce.nexus.types import AnyRun\n",
2223
"from ess.dream.beamline import InstrumentConfiguration, choppers"
2324
]
2425
},
@@ -62,7 +63,7 @@
6263
"wf[time_of_flight.LtotalRange] = sc.scalar(60.0, unit=\"m\"), sc.scalar(80.0, unit=\"m\")\n",
6364
"wf[time_of_flight.NumberOfSimulatedNeutrons] = 200_000 # Increase this number for more reliable results\n",
6465
"wf[time_of_flight.SourcePosition] = sc.vector([0, 0, 0], unit='m')\n",
65-
"wf[time_of_flight.DiskChoppers] = disk_choppers\n",
66+
"wf[time_of_flight.DiskChoppers[AnyRun]] = disk_choppers\n",
6667
"wf[time_of_flight.DistanceResolution] = sc.scalar(0.1, unit=\"m\")\n",
6768
"wf[time_of_flight.TimeResolution] = sc.scalar(250.0, unit='us')\n",
6869
"wf[time_of_flight.LookupTableRelativeErrorThreshold] = 0.02\n",

docs/user-guide/dream/dream-powder-reduction.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -229,7 +229,7 @@
229229
"name": "python",
230230
"nbconvert_exporter": "python",
231231
"pygments_lexer": "ipython3",
232-
"version": "3.10.14"
232+
"version": "3.11.10"
233233
}
234234
},
235235
"nbformat": 4,

docs/user-guide/dream/workflow-widget-dream.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@
5959
"wfw = widget.children[1].children[0]\n",
6060
"outputs = wfw.output_selection_box.typical_outputs_widget\n",
6161
"keys, values = zip(*outputs.options, strict=True)\n",
62-
"ind = keys.index(\"IofDspacing[SampleRun]\")\n",
62+
"ind = keys.index(\"IntensityDspacing[SampleRun]\")\n",
6363
"outputs.value = (values[ind],)\n",
6464
"# Refresh parameters\n",
6565
"pbox = wfw.parameter_box\n",

0 commit comments

Comments (0)