|
11 | 11 | "We will begin by importing the modules that are necessary for this notebook." |
12 | 12 | ] |
13 | 13 | }, |
| 14 | + { |
| 15 | + "cell_type": "markdown", |
| 16 | + "metadata": {}, |
| 17 | + "source": [ |
| 18 | + "## Setup" |
| 19 | + ] |
| 20 | + }, |
14 | 21 | { |
15 | 22 | "cell_type": "code", |
16 | 23 | "execution_count": null, |
|
20 | 27 | "import scipp as sc\n", |
21 | 28 | "import sciline\n", |
22 | 29 | "from ess import amor\n", |
23 | | - "from ess.reflectometry.types import *" |
| 30 | + "from ess.reflectometry.types import *\n", |
| 31 | + "from ess.amor.types import *" |
24 | 32 | ] |
25 | 33 | }, |
26 | 34 | { |
|
30 | 38 | "outputs": [], |
31 | 39 | "source": [ |
32 | 40 | "pl = sciline.Pipeline(\n", |
33 | | - " (*amor.providers, *amor.data.providers), params=amor.default_parameters\n", |
| 41 | + " (\n", |
| 42 | + " *amor.providers,\n", |
| 43 | + " *amor.data.providers,\n", |
| 44 | + " ),\n", |
| 45 | + " params=amor.default_parameters,\n", |
34 | 46 | ")\n", |
35 | 47 | "\n", |
36 | | - "pl[QBins] = sc.geomspace(dim='Q', start=0.008, stop=0.075, num=200, unit='1/angstrom')\n", |
37 | | - "pl[SampleRotation[Sample]] = sc.scalar(0.7989, unit='deg')\n", |
38 | | - "pl[PoochFilename[Sample]] = \"sample.nxs\"\n", |
39 | | - "pl[SampleRotation[Reference]] = sc.scalar(0.8389, unit='deg')\n", |
40 | | - "pl[PoochFilename[Reference]] = \"reference.nxs\"\n", |
41 | | - "pl[WavelengthEdges] = sc.array(dims=['wavelength'], values=[2.4, 16.0], unit='angstrom')" |
| 48 | + "pl[SampleSize[Sample]] = sc.scalar(10.0, unit='mm')\n", |
| 49 | + "pl[SampleSize[Reference]] = sc.scalar(10.0, unit='mm')\n", |
| 50 | + "\n", |
| 51 | + "pl[ChopperPhase[Reference]] = sc.scalar(-7.5, unit='deg')\n", |
| 52 | + "pl[ChopperPhase[Sample]] = sc.scalar(-7.5, unit='deg')\n", |
| 53 | + "\n", |
| 54 | + "pl[QBins] = sc.geomspace(dim='Q', start=0.005, stop=0.3, num=391, unit='1/angstrom')\n", |
| 55 | + "pl[WavelengthBins] = sc.geomspace('wavelength', 2.8, 12, 301, unit='angstrom')\n", |
| 56 | + "\n", |
| 57 | + "# The YIndexLimits and ZIndexLimits define ranges on the detector where\n", |
| 58 | + "# data is considered to be valid signal.\n", |
| 59 | + "# They represent the lower and upper boundaries of a range of pixel indices.\n", |
| 60 | + "pl[YIndexLimits] = sc.scalar(11, unit=None), sc.scalar(41, unit=None)\n", |
| 61 | + "pl[ZIndexLimits] = sc.scalar(80, unit=None), sc.scalar(370, unit=None)" |
| 62 | + ] |
| 63 | + }, |
| 64 | + { |
| 65 | + "cell_type": "code", |
| 66 | + "execution_count": null, |
| 67 | + "metadata": {}, |
| 68 | + "outputs": [], |
| 69 | + "source": [ |
| 70 | + "pl.visualize(NormalizedIofQ, graph_attr={'rankdir': 'LR'})" |
| 71 | + ] |
| 72 | + }, |
| 73 | + { |
| 74 | + "cell_type": "markdown", |
| 75 | + "metadata": {}, |
| 76 | + "source": [ |
| 77 | + "## Caching the reference result\n", |
| 78 | + "\n", |
| 79 | + "The reference result (used for normalizing the sample data) only needs to be computed once.\n", |
| 80 | + "It represents the intensity reflected by the super-mirror.\n", |
| 81 | + "\n", |
| 82 | + "We compute it using the pipeline and thereafter set the result back on the original pipeline." |
42 | 83 | ] |
43 | 84 | }, |
44 | 85 | { |
|
47 | 88 | "metadata": {}, |
48 | 89 | "outputs": [], |
49 | 90 | "source": [ |
50 | | - "pl.visualize((NormalizedIofQ, QResolution), graph_attr={'rankdir': 'LR'})" |
| 91 | + "pl[TutorialFilename[Reference]] = \"amor2023n000614.hdf\"\n", |
| 92 | + "# The sample rotation value in the file is slightly off, so we set it manually\n", |
| 93 | + "pl[SampleRotation[Reference]] = sc.scalar(0.65, unit='deg')\n", |
| 94 | + "\n", |
| 95 | + "reference_result = pl.compute(IdealReferenceIntensity)\n", |
| 96 | + "# Set the result back onto the pipeline to cache it\n", |
| 97 | + "pl[IdealReferenceIntensity] = reference_result" |
| 98 | + ] |
| 99 | + }, |
| 100 | + { |
| 101 | + "cell_type": "markdown", |
| 102 | + "metadata": {}, |
| 103 | + "source": [ |
| 104 | + "If we now visualize the pipeline again, we can see that the reference is not re-computed:" |
51 | 105 | ] |
52 | 106 | }, |
53 | 107 | { |
|
56 | 110 | "metadata": {}, |
57 | 111 | "outputs": [], |
58 | 112 | "source": [ |
59 | | - "# Compute I over Q and the standard deviation of Q\n", |
60 | | - "ioq, qstd = pl.compute((NormalizedIofQ, QResolution)).values()" |
| 113 | + "pl.visualize(NormalizedIofQ, graph_attr={'rankdir': 'LR'})" |
| 114 | + ] |
| 115 | + }, |
| 116 | + { |
| 117 | + "cell_type": "markdown", |
| 118 | + "metadata": {}, |
| 119 | + "source": [ |
| 120 | + "## Computing sample reflectivity\n", |
| 121 | + "\n", |
| 122 | + "We now compute the sample reflectivity from 3 runs that used different sample rotation angles.\n", |
| 123 | + "The different rotation angles cover different ranges in $Q$." |
61 | 124 | ] |
62 | 125 | }, |
63 | 126 | { |
|
66 | 129 | "metadata": {}, |
67 | 130 | "outputs": [], |
68 | 131 | "source": [ |
69 | | - "import matplotlib.pyplot as plt\n", |
| 132 | + "pl[TutorialFilename[Sample]] = \"amor2023n000608.hdf\"\n", |
| 133 | + "pl[SampleRotation[Sample]] = sc.scalar(0.85, unit='deg')\n", |
| 134 | + "ioq8 = pl.compute(NormalizedIofQ).hist()\n", |
| 135 | + "\n", |
| 136 | + "pl[TutorialFilename[Sample]] = \"amor2023n000609.hdf\"\n", |
| 137 | + "pl[SampleRotation[Sample]] = sc.scalar(2.25, unit='deg')\n", |
| 138 | + "ioq9 = pl.compute(NormalizedIofQ).hist()\n", |
70 | 139 | "\n", |
71 | | - "fig = plt.figure(figsize=(5, 7))\n", |
72 | | - "ax1 = fig.add_axes([0, 0.55, 1.0, 0.45])\n", |
73 | | - "ax2 = fig.add_axes([0, 0.0, 1.0, 0.45])\n", |
74 | | - "cax = fig.add_axes([1.05, 0.55, 0.03, 0.45])\n", |
75 | | - "fig1 = ioq.plot(norm='log', ax=ax1, cax=cax, grid=True)\n", |
76 | | - "fig2 = ioq.mean('detector_number').plot(norm='log', ax=ax2, grid=True)\n", |
77 | | - "fig1.canvas.xrange = fig2.canvas.xrange" |
| 140 | + "pl[TutorialFilename[Sample]] = \"amor2023n000610.hdf\"\n", |
| 141 | + "pl[SampleRotation[Sample]] = sc.scalar(3.65, unit='deg')\n", |
| 142 | + "ioq10 = pl.compute(NormalizedIofQ).hist()\n", |
| 143 | + "\n", |
| 144 | + "sc.plot({'608': ioq8, '609': ioq9, '610': ioq10}, norm='log', vmin=1e-4)" |
| 145 | + ] |
| 146 | + }, |
| 147 | + { |
| 148 | + "cell_type": "markdown", |
| 149 | + "metadata": {}, |
| 150 | + "source": [ |
| 151 | + "### Additional diagnostics plots" |
| 152 | + ] |
| 153 | + }, |
| 154 | + { |
| 155 | + "cell_type": "code", |
| 156 | + "execution_count": null, |
| 157 | + "metadata": {}, |
| 158 | + "outputs": [], |
| 159 | + "source": [ |
| 160 | + "pl[TutorialFilename[Sample]] = \"amor2023n000608.hdf\"\n", |
| 161 | + "pl[SampleRotation[Sample]] = sc.scalar(0.85, unit='deg')\n", |
| 162 | + "pl.compute(ReflectivityDiagnosticsView)" |
78 | 163 | ] |
79 | 164 | }, |
80 | 165 | { |
81 | 166 | "cell_type": "markdown", |
82 | 167 | "metadata": {}, |
83 | 168 | "source": [ |
84 | 169 | "## Make a $(\\lambda, \\theta)$ map\n", |
85 | | - "A good sanity check is to create a two-dimensional map of the counts in $\\lambda$ and $\\theta$ bins. To achieve this, we request the `ThetaData` from the pipeline. In the graph above we can see that `WavelengthData` is required to compute `ThetaData`, therefore it is also present in `ThetaData` so we don't need to require it separately." |
| 170 | + "A good sanity check is to create a two-dimensional map of the counts in $\\lambda$ and $\\theta$ bins and make sure the triangles converge at the origin." |
86 | 171 | ] |
87 | 172 | }, |
88 | 173 | { |
|
91 | 176 | "metadata": {}, |
92 | 177 | "outputs": [], |
93 | 178 | "source": [ |
94 | | - "from ess.reflectometry.types import ThetaData\n", |
95 | | - "\n", |
96 | | - "pl.compute(ThetaData[Sample]).bins.concat('detector_number').hist(\n", |
97 | | - " theta=sc.linspace(dim='theta', start=0.0, stop=1.2, num=165, unit='deg').to(\n", |
98 | | - " unit='rad'\n", |
99 | | - " ),\n", |
100 | | - " wavelength=sc.linspace(\n", |
101 | | - " dim='wavelength', start=0, stop=15.0, num=165, unit='angstrom'\n", |
102 | | - " ),\n", |
103 | | - ").plot()" |
| 179 | + "pl.compute(WavelengthThetaFigure)" |
104 | 180 | ] |
105 | 181 | }, |
106 | 182 | { |
107 | 183 | "cell_type": "markdown", |
108 | 184 | "metadata": {}, |
109 | 185 | "source": [ |
110 | | - "This plot can be used to check if the value of the sample rotation angle $\\omega$ is correct. The bright triangles should be pointing back to the origin $\\lambda = \\theta = 0$." |
| 186 | + "This plot can be used to check if the value of the sample rotation angle $\\omega$ is correct. The bright triangles should be pointing back to the origin $\\lambda = \\theta = 0$. In the figure above the black lines are all passing through the origin." |
111 | 187 | ] |
112 | 188 | }, |
113 | 189 | { |
|
139 | 215 | "metadata": {}, |
140 | 216 | "outputs": [], |
141 | 217 | "source": [ |
142 | | - "for p in (*orso.providers, *amor.orso.providers):\n", |
143 | | - " pl.insert(p)\n", |
144 | | - "\n", |
145 | 218 | "pl[orso.OrsoCreator] = orso.OrsoCreator(\n", |
146 | 219 | " fileio.base.Person(\n", |
147 | 220 | " name='Max Mustermann',\n", |
|
151 | 224 | ")" |
152 | 225 | ] |
153 | 226 | }, |
154 | | - { |
155 | | - "cell_type": "markdown", |
156 | | - "metadata": {}, |
157 | | - "source": [ |
158 | | - "Then, we recompute $I(Q)$ and and combine it with the ORSO metadata:" |
159 | | - ] |
160 | | - }, |
161 | 227 | { |
162 | 228 | "cell_type": "code", |
163 | 229 | "execution_count": null, |
164 | 230 | "metadata": {}, |
165 | 231 | "outputs": [], |
166 | 232 | "source": [ |
167 | | - "iofq_dataset = pl.compute(orso.OrsoIofQDataset)" |
| 233 | + "pl.visualize(orso.OrsoIofQDataset, graph_attr={'rankdir': 'LR'})" |
168 | 234 | ] |
169 | 235 | }, |
170 | 236 | { |
171 | 237 | "cell_type": "markdown", |
172 | 238 | "metadata": {}, |
173 | 239 | "source": [ |
174 | | - "Unfortunately, some metadata could not be determined automatically.\n", |
175 | | - "In particular, we need to specify the sample manually:" |
| 240 | + "We build our ORSO dataset from the computed $I(Q)$ and the ORSO metadata:" |
176 | 241 | ] |
177 | 242 | }, |
178 | 243 | { |
|
181 | 246 | "metadata": {}, |
182 | 247 | "outputs": [], |
183 | 248 | "source": [ |
184 | | - "iofq_dataset.info.data_source.sample" |
185 | | - ] |
186 | | - }, |
187 | | - { |
188 | | - "cell_type": "code", |
189 | | - "execution_count": null, |
190 | | - "metadata": {}, |
191 | | - "outputs": [], |
192 | | - "source": [ |
193 | | - "iofq_dataset.info.data_source.sample = fileio.data_source.Sample(\n", |
194 | | - " name='Ni / Ti Multilayer',\n", |
195 | | - " model=fileio.data_source.SampleModel(\n", |
196 | | - " stack='air | (Ni | Ti) * 5 | Si',\n", |
197 | | - " ),\n", |
198 | | - ")" |
| 249 | + "iofq_dataset = pl.compute(orso.OrsoIofQDataset)\n", |
| 250 | + "iofq_dataset" |
199 | 251 | ] |
200 | 252 | }, |
201 | 253 | { |
202 | 254 | "cell_type": "markdown", |
203 | 255 | "metadata": {}, |
204 | 256 | "source": [ |
205 | | - "And we also add the URL of this notebook to make it easier to reproduce the data:" |
| 257 | + "We also add the URL of this notebook to make it easier to reproduce the data:" |
206 | 258 | ] |
207 | 259 | }, |
208 | 260 | { |
|