Skip to content

Commit ca79a32

Browse files
committed
📦 Detect active subglacial lakes up to 20201224
Re-running the clustering algorithm to detect Antarctic subglacial lakes with ICESat-2 ATL11 data up to 20201224. There are now 193 potential active lakes compared to 194 before. Keeping the same DBSCAN hyperparameters as in the last run at 6bbd583, specifically an eps of 3000 and min_samples of 300. On the Siple Coast, Kamb 10 has shown up on Kamb Ice Stream, Lake WXI has expanded in size with the addition of a new lobe, and Lake 78 is now just Lake 8 ... because three long mini-clusters (part of Lake 7) have disappeared. Also refactored atl06_to_atl11.py slightly according to some Sourcery suggestions.
1 parent 1db62e9 commit ca79a32

File tree

7 files changed

+539
-515
lines changed

7 files changed

+539
-515
lines changed

antarctic_subglacial_lakes_3031.geojson

Lines changed: 193 additions & 194 deletions
Large diffs are not rendered by default.

antarctic_subglacial_lakes_4326.geojson

Lines changed: 193 additions & 194 deletions
Large diffs are not rendered by default.

atl06_to_atl11.ipynb

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@
111111
"catalog = intake.open_catalog(\"deepicedrain/atlas_catalog.yaml\")\n",
112112
"with open(file=\"ATL11_to_download.txt\", mode=\"r\") as f:\n",
113113
" urlpaths = f.readlines()\n",
114-
"dates: set = set(url.split(\"/\")[-2] for url in urlpaths)\n",
114+
"dates: set = {url.split(\"/\")[-2] for url in urlpaths}\n",
115115
"len(dates)"
116116
]
117117
},
@@ -138,16 +138,17 @@
138138
"cell_type": "code",
139139
"execution_count": null,
140140
"metadata": {
141-
"lines_to_next_cell": 2
141+
"lines_to_next_cell": 0
142142
},
143143
"outputs": [],
144144
"source": [
145145
"# Check download progress here, https://stackoverflow.com/a/37901797/6611055\n",
146-
"responses = []\n",
147-
"for f in tqdm.tqdm(\n",
148-
" iterable=dask.distributed.as_completed(futures=futures), total=len(futures)\n",
149-
"):\n",
150-
" responses.append(f.result())"
146+
"responses = [\n",
147+
" f.result()\n",
148+
" for f in tqdm.tqdm(\n",
149+
" iterable=dask.distributed.as_completed(futures=futures), total=len(futures)\n",
150+
" )\n",
151+
"]"
151152
]
152153
},
153154
{

atl06_to_atl11.py

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@
5757
catalog = intake.open_catalog("deepicedrain/atlas_catalog.yaml")
5858
with open(file="ATL11_to_download.txt", mode="r") as f:
5959
urlpaths = f.readlines()
60-
dates: set = set(url.split("/")[-2] for url in urlpaths)
60+
dates: set = {url.split("/")[-2] for url in urlpaths}
6161
len(dates)
6262

6363
# %%
@@ -75,13 +75,12 @@
7575

7676
# %%
7777
# Check download progress here, https://stackoverflow.com/a/37901797/6611055
78-
responses = []
79-
for f in tqdm.tqdm(
80-
iterable=dask.distributed.as_completed(futures=futures), total=len(futures)
81-
):
82-
responses.append(f.result())
83-
84-
78+
responses = [
79+
f.result()
80+
for f in tqdm.tqdm(
81+
iterable=dask.distributed.as_completed(futures=futures), total=len(futures)
82+
)
83+
]
8584
# %%
8685

8786
# %% [markdown]

0 commit comments

Comments
 (0)