|
23 | 23 | "source": [ |
24 | 24 | "import coincident\n", |
25 | 25 | "import geopandas as gpd\n", |
26 | | - "from shapely.geometry import box" |
| 26 | + "from shapely.geometry import box\n", |
| 27 | + "import matplotlib.pyplot as plt\n", |
| 28 | + "# %config InlineBackend.figure_format = 'retina'" |
27 | 29 | ] |
28 | 30 | }, |
29 | 31 | { |
|
33 | 35 | "source": [ |
34 | 36 | "## Search \n", |
35 | 37 | "\n", |
36 | | - "We'll search for data in Maryland" |
| 38 | + "We'll search for data in Maryland in July 2017" |
37 | 39 | ] |
38 | 40 | }, |
39 | 41 | { |
|
49 | 51 | "aoi = aoi.simplify(0.01)\n", |
50 | 52 | "m = aoi.explore(color=\"black\")\n", |
51 | 53 | "\n", |
52 | | - "gf_gliht = coincident.search.search(\n", |
| 54 | + "gf = coincident.search.search(\n", |
53 | 55 | " dataset=\"gliht\",\n", |
54 | 56 | " intersects=aoi,\n", |
55 | | - " datetime=[\"2017-07-31\"],\n", |
| 57 | + " datetime=\"2017-07\",\n", |
56 | 58 | ")\n", |
57 | | - "print(f\"Found {len(gf_gliht)} G-LiHT scenes\")\n", |
58 | | - "gf_gliht.explore(m=m, color=\"magenta\")" |
| 59 | + "\n", |
| 60 | + "print(f\"Found {len(gf)} G-LiHT Items\")\n", |
| 61 | + "print(f\"G-LiHT collections: {gf.collection.unique()}\")\n", |
| 62 | + "gf.explore(m=m, color=\"magenta\")" |
59 | 63 | ] |
60 | 64 | }, |
61 | 65 | { |
62 | | - "cell_type": "code", |
63 | | - "execution_count": null, |
| 66 | + "cell_type": "markdown", |
64 | 67 | "id": "4", |
65 | 68 | "metadata": {}, |
66 | | - "outputs": [], |
67 | 69 | "source": [ |
68 | | - "# Subset a particular scene\n", |
69 | | - "gf_gliht = gf_gliht.iloc[[0]]\n", |
70 | | - "c = gf_gliht.geometry.centroid\n", |
71 | | - "mini_aoi = gpd.GeoDataFrame(\n", |
72 | | - " geometry=[box(c.x - 0.0045, c.y - 0.0045, c.x + 0.0045, c.y + 0.0045)],\n", |
73 | | - " crs=\"EPSG:4326\",\n", |
74 | | - ")" |
| 70 | + "## Get Data\n", |
| 71 | + "\n", |
| 72 | +    "NASA G-LiHT provides many different gridded datasets. The collections below are those currently supported by `coincident`.\n",                   |
| 73 | + "\n", |
| 74 | + "| Collection | Description |\n", |
| 75 | + "| -- | -- | \n", |
| 76 | + "| GLORTHO_001 | orthorectified high-resolution aerial photography |\n", |
| 77 | + "| GLCHMT_001 | maximum canopy height and canopy variability information |\n", |
| 78 | + "| GLDSMT_001 | Digital Surface Model, Mean, Aspect, Rugosity, and Slope |\n", |
| 79 | + "| GLDTMT_001 | bare earth elevation, aspect and slope on the EGM96 Geoid |\n", |
| 80 | + "| GLLIDARPC_001 | LiDAR Point Cloud data product (LAS format) |\n", |
| 81 | + "\n", |
| 82 | + "```{note}\n", |
| 83 | + "Not all G-LiHT flights will contain every single product listed. For example, a flight may have 'dsm' data but not 'ortho' data.\n", |
| 84 | + "```" |
75 | 85 | ] |
76 | 86 | }, |
77 | 87 | { |
|
81 | 91 | "metadata": {}, |
82 | 92 | "outputs": [], |
83 | 93 | "source": [ |
84 | | - "m = gf_gliht.explore(color=\"black\")\n", |
85 | | - "mini_aoi.explore(color=\"magenta\", m=m)" |
| 94 | + "# Subset a particular scene\n", |
| 95 | + "collection = \"GLDTMT_001\"\n", |
| 96 | + "gf_gliht = gf.query(f'collection == \"{collection}\"')\n", |
| 97 | + "gf_gliht.id" |
86 | 98 | ] |
87 | 99 | }, |
88 | 100 | { |
89 | | - "cell_type": "markdown", |
| 101 | + "cell_type": "code", |
| 102 | + "execution_count": null, |
90 | 103 | "id": "6", |
91 | 104 | "metadata": {}, |
| 105 | + "outputs": [], |
92 | 106 | "source": [ |
93 | | - "## Get Data\n", |
94 | | - "\n", |
95 | | - "NASA G-LiHT has many different provided gridded datasets. The following collections below are the current datasets supported by `coincident`.\n", |
96 | | - "\n", |
97 | | - "```python\n", |
98 | | - " - 'ortho': Orthorectified aerial imagery\n", |
99 | | - " - 'chm': Canopy height model\n", |
100 | | - " - 'dsm': Digital surface model\n", |
101 | | - " - 'dtm': Digital terrain model\n", |
102 | | - " - 'hyperspectral_ancillary': Ancillary HSI data\n", |
103 | | - " - 'radiance': Hyperspectral aradiance\n", |
104 | | - " - 'reflectance': Hyperspectral surface reflectance\n", |
105 | | - " - 'hyperspectral_vegetation': HSI-derived veg indices\n", |
106 | | - "```\n", |
107 | | - "\n", |
108 | | - "```{note}\n", |
109 | | - "Not all G-LiHT flights will contain every single product listed. For example, a flight may have 'dsm' data but not 'ortho' data.\n", |
110 | | - "```\n", |
111 | | - "\n", |
112 | | - "### Stream \n", |
113 | | - "\n", |
114 | | - "First we'll show how to stream gridded DEMs directly into Xarray" |
| 107 | + "# Just use the first one\n", |
| 108 | + "gs_item = gf_gliht.iloc[0]\n", |
| 109 | + "c = gs_item.geometry.centroid\n", |
| 110 | + "mini_aoi = gpd.GeoDataFrame(\n", |
| 111 | + " geometry=[box(c.x - 0.0045, c.y - 0.0045, c.x + 0.0045, c.y + 0.0045)],\n", |
| 112 | + " crs=\"EPSG:4326\",\n", |
| 113 | + ")" |
115 | 114 | ] |
116 | 115 | }, |
117 | 116 | { |
|
121 | 120 | "metadata": {}, |
122 | 121 | "outputs": [], |
123 | 122 | "source": [ |
124 | | - "# we'll need the dataset id from our initial search\n", |
125 | | - "gf_gliht.id.item()" |
| 123 | + "m = gf_gliht.explore(column=\"id\")\n", |
| 124 | + "mini_aoi.explore(m=m, color=\"red\", style_kwds={\"fill\": False, \"weight\": 3})" |
126 | 125 | ] |
127 | 126 | }, |
128 | 127 | { |
129 | 128 | "cell_type": "markdown", |
130 | 129 | "id": "8", |
131 | 130 | "metadata": {}, |
| 131 | + "source": [ |
| 132 | + "### Stream \n", |
| 133 | + "\n", |
| 134 | + "First we'll show how to stream a subset of a gridded DSM directly into Xarray" |
| 135 | + ] |
| 136 | + }, |
| 137 | + { |
| 138 | + "cell_type": "markdown", |
| 139 | + "id": "9", |
| 140 | + "metadata": {}, |
132 | 141 | "source": [ |
133 | 142 | "```{important}\n", |
134 | 143 | "Unlike the G-LiHT search, you will need NASA Earthdata credentials (aka EarthData Login (EDL)) to read in and download the gridded datasets from G-LiHT. This requires creating an [EDL Token](https://urs.earthdata.nasa.gov/documentation/for_users/user_token) and making sure you've set the environment variable `EARTHDATA_TOKEN=xxxxx`\n", |
|
138 | 147 | { |
139 | 148 | "cell_type": "code", |
140 | 149 | "execution_count": null, |
141 | | - "id": "9", |
| 150 | + "id": "10", |
142 | 151 | "metadata": {}, |
143 | 152 | "outputs": [], |
144 | 153 | "source": [ |
145 | 154 | "%%time\n", |
146 | | - "ds = coincident.io.xarray.load_gliht_raster(\n", |
| 155 | + "\n", |
| 156 | + "da = coincident.io.xarray.load_gliht(\n", |
| 157 | + " item=gs_item,\n", |
147 | 158 | " aoi=mini_aoi,\n", |
148 | | - " dataset_id=gf_gliht.id.item(),\n", |
149 | | - " product=\"chm\",\n", |
150 | 159 | ")\n", |
151 | | - "ds" |
| 160 | + "da" |
152 | 161 | ] |
153 | 162 | }, |
154 | 163 | { |
155 | 164 | "cell_type": "code", |
156 | 165 | "execution_count": null, |
157 | | - "id": "10", |
| 166 | + "id": "11", |
158 | 167 | "metadata": {}, |
159 | 168 | "outputs": [], |
160 | 169 | "source": [ |
161 | | - "# Convert to in-memory DataArray\n", |
162 | | - "da_chm = ds[\"CHM\"].squeeze().load()" |
| 170 | + "coincident.plot.plot_dem(da)\n", |
| 171 | + "plt.title(gs_item.id);" |
| 172 | + ] |
| 173 | + }, |
| 174 | + { |
| 175 | + "cell_type": "markdown", |
| 176 | + "id": "12", |
| 177 | + "metadata": {}, |
| 178 | + "source": [ |
| 179 | + "### Download\n", |
| 180 | + "\n", |
| 181 | + "#### Download multiple STAC Item Assets\n", |
| 182 | + "\n", |
| 183 | + "This will put STAC metadata, extended metadata, a browse/thumbnail image, and a TIF in a specified folder" |
163 | 184 | ] |
164 | 185 | }, |
165 | 186 | { |
166 | 187 | "cell_type": "code", |
167 | 188 | "execution_count": null, |
168 | | - "id": "11", |
| 189 | + "id": "13", |
169 | 190 | "metadata": {}, |
170 | 191 | "outputs": [], |
171 | 192 | "source": [ |
172 | | - "# Plot\n", |
173 | | - "da_chm.plot.imshow(cmap=\"Greens\");" |
| 193 | + "# back to a GeoDataFrame from a Series\n", |
| 194 | + "gfi = gpd.GeoDataFrame([gs_item])\n", |
| 195 | + "\n", |
| 196 | + "# And to a Pystac Item\n", |
| 197 | + "items = coincident.search.to_pystac_items(gfi)\n", |
| 198 | + "items[0]" |
| 199 | + ] |
| 200 | + }, |
| 201 | + { |
| 202 | + "cell_type": "code", |
| 203 | + "execution_count": null, |
| 204 | + "id": "14", |
| 205 | + "metadata": {}, |
| 206 | + "outputs": [], |
| 207 | + "source": [ |
| 208 | + "# Download the item assets (browse, metadata, thumbnail)\n", |
| 209 | + "local_item = await coincident.io.download.download_item(\n", |
| 210 | + " items[0],\n", |
| 211 | + " path=\"/tmp/gliht/\",\n", |
| 212 | + ")" |
174 | 213 | ] |
175 | 214 | }, |
176 | 215 | { |
177 | 216 | "cell_type": "markdown", |
178 | | - "id": "12", |
| 217 | + "id": "15", |
179 | 218 | "metadata": {}, |
180 | 219 | "source": [ |
181 | | - "### Download \n", |
182 | | - "\n", |
183 | | - "```{note}\n", |
184 | | - "Since there are so many gridded products for G-LiHT, you can specify multiple products to download in one call. This download will automatically skip any products that aren't available for the specific flight among the current datasets supported by `coincident`\n", |
185 | | - "```" |
| 220 | + "#### Download a single file" |
186 | 221 | ] |
187 | 222 | }, |
188 | 223 | { |
189 | 224 | "cell_type": "code", |
190 | 225 | "execution_count": null, |
191 | | - "id": "13", |
| 226 | + "id": "16", |
192 | 227 | "metadata": {}, |
193 | 228 | "outputs": [], |
194 | 229 | "source": [ |
195 | | - "coincident.io.download.download_gliht_raster(\n", |
196 | | - " aoi=mini_aoi,\n", |
197 | | - " dataset_id=gf_gliht.id.item(),\n", |
198 | | - " products=[\"chm\", \"dsm\", \"dtm\"],\n", |
199 | | - " output_dir=\"/tmp\",\n", |
200 | | - ")" |
| 230 | + "tif_key = [x for x in gs_item.assets.keys() if x.startswith(\"001\")][0]\n", |
| 231 | + "href = gs_item.assets[tif_key][\"href\"]\n", |
| 232 | + "coincident.io.download.download_files([href], \"/tmp/\")" |
201 | 233 | ] |
202 | 234 | } |
203 | 235 | ], |
|
0 commit comments