Skip to content

Commit f160648

Browse files
committed
chore(tests): refine swath start test report
1 parent 82f15ed commit f160648

File tree

1 file changed

+34
-14
lines changed

1 file changed

+34
-14
lines changed

tests/reports/l1b_swath_start.ipynb

Lines changed: 34 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,11 @@
3636
"location; which is why one usually requires a minimum coherence for any\n",
3737
"processed sample.\n",
3838
"\n",
39-
"With those assumptions and choices, I search for the maximum coherence\n",
40-
"in a 10 m window after the coherence first exceeds 0.85 and name this\n",
41-
"sample the POCA. In another window from 5 to 50 m after the POCA, I\n",
42-
"search for a rise in coherence. This is where the swath processing\n",
43-
"starts.\n",
39+
"With those assumptions and choices, I search for the maximum of the\n",
40+
"smoothed coherence in a 10 m window after the coherence first exceeds\n",
41+
"the coherence threshold and name this sample the POCA. In another window\n",
42+
"from 5 to 50 m after the POCA, I search for a rise in coherence. This is\n",
43+
"where the swath processing starts.\n",
4444
"\n",
4545
"In the following, \"edge cases\" are recognized by an early or late swath\n",
4646
"start. For the _k_ earliest and latest of those, the coherence waveform\n",
@@ -53,9 +53,10 @@
5353
"metadata": {},
5454
"outputs": [],
5555
"source": [
56-
"k_smallest = 5\n",
57-
"k_biggest = 5\n",
58-
"k_random = 5"
56+
"k_zero = 3\n",
57+
"k_smallest = 3\n",
58+
"k_biggest = 3\n",
59+
"k_random = 3"
5960
]
6061
},
6162
{
@@ -95,6 +96,15 @@
9596
"import numpy as np"
9697
]
9798
},
99+
{
100+
"cell_type": "code",
101+
"execution_count": null,
102+
"metadata": {},
103+
"outputs": [],
104+
"source": [
105+
"import random"
106+
]
107+
},
98108
{
99109
"cell_type": "code",
100110
"execution_count": null,
@@ -110,8 +120,8 @@
110120
"metadata": {},
111121
"outputs": [],
112122
"source": [
113-
"for idx in np.argpartition(l1b_data.swath_start.values, kth=k_smallest)[:k_smallest]:\n",
114-
" cryoswath.test_plots.waveform.coherence(l1b_data.isel(time_20_ku=[idx]))"
123+
"for idx in random.sample(np.argwhere(l1b_data.swath_start.values==0).flatten().tolist(), k=k_zero):\n",
124+
" ax = cryoswath.test_plots.waveform.coherence(l1b_data.isel(time_20_ku=[idx]))"
115125
]
116126
},
117127
{
@@ -120,8 +130,7 @@
120130
"metadata": {},
121131
"outputs": [],
122132
"source": [
123-
"for idx in np.argpartition(l1b_data.swath_start.values, kth=-k_biggest)[-k_biggest:]:\n",
124-
" cryoswath.test_plots.waveform.coherence(l1b_data.isel(time_20_ku=[idx]))"
133+
"sortkeys = np.argsort(l1b_data.swath_start.values)[sum(l1b_data.swath_start.values==0):]"
125134
]
126135
},
127136
{
@@ -130,7 +139,18 @@
130139
"metadata": {},
131140
"outputs": [],
132141
"source": [
133-
"import random"
142+
"for idx in sortkeys[:k_smallest]:\n",
143+
" ax = cryoswath.test_plots.waveform.coherence(l1b_data.isel(time_20_ku=[idx]))"
144+
]
145+
},
146+
{
147+
"cell_type": "code",
148+
"execution_count": null,
149+
"metadata": {},
150+
"outputs": [],
151+
"source": [
152+
"for idx in sortkeys[-k_biggest:]:\n",
153+
" cryoswath.test_plots.waveform.coherence(l1b_data.isel(time_20_ku=[idx]))"
134154
]
135155
},
136156
{
@@ -139,7 +159,7 @@
139159
"metadata": {},
140160
"outputs": [],
141161
"source": [
142-
"for idx in random.sample(range(len(l1b_data.time_20_ku)), k=k_random):\n",
162+
"for idx in random.sample(sortkeys.tolist(), k=k_random):\n",
143163
" cryoswath.test_plots.waveform.coherence(l1b_data.isel(time_20_ku=[idx]))"
144164
]
145165
}

0 commit comments

Comments (0)