Skip to content

Commit 1c2514b

Browse files
committed
upgrade to copernicus datahub and new pyproj version
1 parent 24ba17f commit 1c2514b

File tree

8 files changed

+702
-165
lines changed

8 files changed

+702
-165
lines changed

ost/Project.py

Lines changed: 33 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,12 @@
2525

2626
from ost.helpers import vector as vec, raster as ras
2727
from ost.helpers import scihub, helpers as h, srtm, copdem
28+
from ost.helpers import copernicus as cop
2829
from ost.helpers.settings import set_log_level, setup_logfile, OST_ROOT
2930
from ost.helpers.settings import check_ard_parameters
3031

31-
from ost.s1 import search, refine_inventory, download
32+
from ost.s1 import search_data as search
33+
from ost.s1 import refine_inventory, download
3234
from ost.s1 import burst_inventory, burst_batch
3335
from ost.s1 import grd_batch
3436

@@ -178,55 +180,23 @@ class Sentinel1(Generic):
178180
"""
179181

180182
product_type = None
181-
"TBD"
182-
183183
beam_mode = None
184-
"TBD"
185-
186184
polarisation = None
187-
"TBD"
188-
189185
inventory_file = None
190-
"TBD"
191-
192186
inventory = None
193-
"TBD"
194-
195187
refined_inventory_dict = None
196-
"TBD"
197-
198188
coverages = None
199-
"TBD"
200-
201189
burst_inventory = None
202-
"TBD"
203-
204190
burst_inventory_file = None
205-
"TBD"
206-
207-
scihub_uname = None
208-
"str: the scihub username"
209-
210-
scihub_pword = None
211-
"str: the scihub password"
212-
191+
dataspace_uname = None
192+
dataspace_pword = None
213193
asf_uname = None
214-
"TBD"
215-
216194
asf_pword = None
217-
"TBD"
218-
219195
peps_uname = None
220-
"TBD"
221-
222196
peps_pword = None
223-
"TBD"
224-
225197
onda_uname = None
226-
"TBD"
227-
228198
onda_pword = None
229-
"TBD"
199+
230200

231201
def __init__(
232202
self,
@@ -235,9 +205,9 @@ def __init__(
235205
start="2014-10-01",
236206
end=datetime.today().strftime(OST_DATEFORMAT),
237207
data_mount=None,
238-
product_type="*",
239-
beam_mode="*",
240-
polarisation="*",
208+
product_type=None,
209+
beam_mode=None,
210+
polarisation=None,
241211
log_level=logging.INFO,
242212
):
243213

@@ -247,21 +217,21 @@ def __init__(
247217

248218
# ------------------------------------------
249219
# 2 Check and set product type
250-
if product_type in ["*", "RAW", "SLC", "GRD"]:
220+
if product_type in [None, "RAW", "SLC", "GRD"]:
251221
self.product_type = product_type
252222
else:
253-
raise ValueError("Product type must be one out of '*', 'RAW', " "'SLC', 'GRD'")
223+
raise ValueError("Product type must be one out of None, 'RAW', " "'SLC', 'GRD'")
254224

255225
# ------------------------------------------
256226
# 3 Check and set beam mode
257-
if beam_mode in ["*", "IW", "EW", "SM"]:
227+
if beam_mode in [None, "IW", "EW", "SM"]:
258228
self.beam_mode = beam_mode
259229
else:
260-
raise ValueError("Beam mode must be one out of 'IW', 'EW', 'SM'")
230+
raise ValueError("Beam mode must be one out of None, 'IW', 'EW', 'SM'")
261231

262232
# ------------------------------------------
263233
# 4 Check and set polarisations
264-
possible_pols = ["*", "VV", "VH", "HV", "HH", "VV VH", "HH HV"]
234+
possible_pols = [None, "VV", "VH", "HV", "HH", "VV VH", "HH HV", "*"]
265235
if polarisation in possible_pols:
266236
self.polarisation = polarisation
267237
else:
@@ -292,8 +262,8 @@ def __init__(
292262

293263
# ------------------------------------------
294264
# 7 Initialize uname and pword to None
295-
self.scihub_uname = None
296-
self.scihub_pword = None
265+
self.dataspace_uname = None
266+
self.dataspace_pword = None
297267

298268
self.asf_uname = None
299269
self.asf_pword = None
@@ -310,7 +280,7 @@ def search(
310280
self,
311281
outfile=OST_INVENTORY_FILE,
312282
append=False,
313-
base_url="https://apihub.copernicus.eu/apihub",
283+
base_url="https://catalogue.dataspace.copernicus.eu/resto/api/",
314284
):
315285
"""High Level search function
316286
@@ -332,30 +302,39 @@ def search(
332302
# construct the final query
333303
query = urllib.parse.quote(f"Sentinel-1 AND {product_specs} AND {aoi} AND {toi}")
334304

335-
if not self.scihub_uname or not self.scihub_pword:
305+
# create query
306+
aoi = cop.create_aoi_str(self.aoi)
307+
toi = cop.create_toi_str(self.start, self.end)
308+
specs = cop.create_s1_product_specs(
309+
self.product_type, self.polarisation, self.beam_mode
310+
)
311+
query = aoi + toi + specs + '&maxRecords=100'
312+
313+
if not self.dataspace_uname or not self.dataspace_pword:
336314
# ask for username and password
337-
self.scihub_uname, self.scihub_pword = scihub.ask_credentials()
315+
self.dataspace_uname, self.dataspace_pword = cop.ask_credentials()
338316

339317
# do the search
340318
if outfile == OST_INVENTORY_FILE:
341319
self.inventory_file = self.inventory_dir / OST_INVENTORY_FILE
342320
else:
343321
self.inventory_file = outfile
344322

345-
search.scihub_catalogue(
323+
base_url = base_url + 'collections/Sentinel1/search.json?'
324+
search.dataspace_catalogue(
346325
query,
347326
self.inventory_file,
348327
append,
349-
self.scihub_uname,
350-
self.scihub_pword,
328+
self.dataspace_uname,
329+
self.dataspace_pword,
351330
base_url,
352331
)
353332

354333
if self.inventory_file.exists():
355334
# read inventory into the inventory attribute
356335
self.read_inventory()
357336
else:
358-
logger.info("No images found in the AOI for this date range")
337+
logger.info("No matching scenes found for the specified search parameters")
359338

360339
def read_inventory(self):
361340
"""Read the Sentinel-1 data inventory from an OST inventory shapefile
@@ -568,7 +547,7 @@ def __init__(
568547
data_mount=None,
569548
product_type="SLC",
570549
beam_mode="IW",
571-
polarisation="*",
550+
polarisation="VV VH",
572551
ard_type="OST-GTC",
573552
snap_cpu_parallelism=cpu_count(),
574553
max_workers=1,

ost/helpers/copernicus.py

Lines changed: 178 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,178 @@
1+
"""This module provides helper functions for Copernicus Dataspace API."""
2+
3+
import getpass
4+
import logging
5+
from pathlib import Path
6+
from datetime import datetime as dt
7+
8+
import requests
9+
from shapely.wkt import loads
10+
11+
logger = logging.getLogger(__name__)
12+
13+
def ask_credentials():
    """Prompt interactively for Copernicus Dataspace username and password."""
    print(
        "If you do not have a Copernicus dataspace user account"
        " go to: https://dataspace.copernicus.eu/ and register"
    )
    username = input("Your Copernicus Dataspace Username:")
    # getpass keeps the password out of the terminal echo
    password = getpass.getpass("Your Copernicus Dataspace Password:")

    return username, password
23+
24+
25+
def get_access_token(username, password=None):
    """Request an OAuth2 access token from the Copernicus Dataspace identity server.

    :param username: Copernicus Dataspace username
    :param password: Copernicus Dataspace password; prompted for
        interactively when not given (original declared this as a bogus
        annotation ``password: None`` instead of a default value)
    :return: the access token string
    :raises Exception: if the token request fails
    """
    if not password:
        logger.info(' Please provide your Copernicus Dataspace password:')
        password = getpass.getpass()

    data = {
        "client_id": "cdse-public",
        "username": username,
        "password": password,
        "grant_type": "password",
    }
    r = None
    try:
        r = requests.post(
            "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
            data=data,
        )
        r.raise_for_status()
    except Exception as e:
        # r is None when the POST itself failed (e.g. connection error);
        # the original referenced r unconditionally and raised NameError there
        detail = r.json() if r is not None else e
        raise Exception(
            f"Access token creation failed. Response from the server was: {detail}"
        ) from e
    return r.json()["access_token"]
48+
49+
50+
def refresh_access_token(refresh_token: str) -> str:
    """Exchange a refresh token for a new Copernicus Dataspace access token.

    :param refresh_token: a valid OAuth2 refresh token
    :return: the new access token string
    :raises Exception: if the refresh request fails
    """
    data = {
        "client_id": "cdse-public",
        "refresh_token": refresh_token,
        "grant_type": "refresh_token",
    }

    r = None
    try:
        r = requests.post(
            "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token",
            data=data,
        )
        r.raise_for_status()
    except Exception as e:
        # r is None when the POST itself failed (e.g. connection error);
        # the original referenced r unconditionally and raised NameError there
        detail = r.json() if r is not None else e
        raise Exception(
            f"Access token refresh failed. Response from the server was: {detail}"
        ) from e

    return r.json()["access_token"]
69+
70+
71+
def create_aoi_str(aoi):
    """Convert a WKT-formatted AOI to dataspace's geometry query attribute.

    :param aoi: area of interest as a WKT string in EPSG:4326,
        i.e. POINT(lon lat)
    :return: query-string fragment for the dataspace search API
    """
    # load to shapely geometry to easily test for geometry type
    geom = loads(aoi)

    # dependent on the type construct the query string
    if geom.geom_type == "Point":
        # WKT points are (lon lat): x is longitude, y is latitude;
        # the original emitted them swapped (lon=geom.y, lat=geom.x)
        return f'&lon={geom.x}&lat={geom.y}'

    else:
        # simplify geometry, as we might otherwise bump into too long string issue
        aoi_convex = geom.convex_hull

        # create dataspace-conform aoi string
        return f'&geometry={aoi_convex}'
86+
87+
def create_toi_str(start="2014-10-01", end=None):
    """Convert start and end dates to the dataspace time-period attributes.

    :param start: start date as 'YYYY-MM-DD'
    :param end: end date as 'YYYY-MM-DD'; defaults to today's date, computed
        at call time (the original default was frozen at module import)
    :return: query-string fragment covering the full days from start to end
    """
    if end is None:
        end = dt.now().strftime("%Y-%m-%d")
    # bring start and end date to query format
    return f"&startDate={start}T00:00:00Z&completionDate={end}T23:59:59Z"
91+
92+
def create_s1_product_specs(product_type=None, polarisation=None, beam=None):
    """Convert Sentinel-1 product metadata to dataspace query attributes.

    Attributes left as None are omitted from the resulting query fragment.
    """
    fragments = []
    if product_type:
        fragments.append(f'&productType={product_type}')
    if polarisation:
        # '%26' is the URL-encoded '&' separating multiple polarisations
        fragments.append(f'&polarisation={polarisation.replace(" ", "%26")}')
    if beam:
        fragments.append(f'&sensorMode={beam}')
    return ''.join(fragments)
100+
101+
102+
def extract_basic_metadata(properties):
    """Extract the basic scene metadata from a dataspace feature's properties.

    :param properties: 'properties' dict of a feature returned by the
        dataspace search API
    :return: the wanted property values, ordered as listed in ``wanted``
    """
    # those are the things we want out of the standard json, in output order
    wanted = ['title', 'orbitDirection', 'platform', 'polarisation', 'swath', 'thumbnail', 'published']

    # loop through all properties
    _dict = {}
    for k, v in properties.items():
        # consider if in the list of wanted properties
        if k in wanted:
            if k == 'polarisation':
                # dataspace separates polarisations with '&'; OST uses a space
                _dict[k] = v.replace('&', ' ')
            elif k == 'title':
                # strip the '.SAFE' extension; guard against titles without
                # it, which the original unconditional v[:-5] would truncate
                _dict[k] = v[:-5] if v.endswith('.SAFE') else v
            elif k == 'thumbnail':
                # derive the manifest.safe location from the thumbnail url
                _dict[k] = '/'.join(v.split('/')[:-2]) + '/manifest.safe'
            else:
                _dict[k] = v

    # order the values as listed in wanted
    sorted_dict = dict(sorted(_dict.items(), key=lambda item: wanted.index(item[0])))
    return sorted_dict.values()
125+
126+
127+
def get_entry(line):
    """Return the text between the first '>' and the following '<' of an xml line."""
    after_opening_tag = line.split('>')[1]
    return after_opening_tag.split('<')[0]
130+
131+
132+
def get_advanced_metadata(metafile, access_token):
    """Download a scene's manifest.safe and extract extended metadata.

    :param metafile: url of the scene's manifest.safe file
    :param access_token: Copernicus Dataspace bearer token
    :return: tuple of (slicenumber, total_slices, relativeorbit,
        lastrelativeorbit, platformidentifier, missiondatatakeid,
        sensoroperationalmode, product_type, orbitnumber, lastorbitnumber,
        beginposition, endposition, acqdate, size placeholder)
    """
    # initialize every field so a manifest missing an entry yields None
    # instead of an UnboundLocalError (the original left them unassigned)
    slicenumber = total_slices = None
    relativeorbit = lastrelativeorbit = None
    platformidentifier = missiondatatakeid = None
    sensoroperationalmode = product_type = None
    orbitnumber = lastorbitnumber = None
    beginposition = endposition = None

    with requests.Session() as session:
        headers = {'Authorization': f'Bearer {access_token}'}
        # NOTE(review): the first request is issued without auth headers,
        # presumably only to resolve the final redirect url — confirm intended
        request = session.request("get", metafile)
        response = session.get(request.url, headers=headers, stream=True)

        # scan the manifest line by line for the entries of interest
        for line in response.iter_lines():

            line = line.decode('utf-8')
            if 's1sarl1:sliceNumber' in line:
                slicenumber = get_entry(line)
            if 's1sarl1:totalSlices' in line:
                total_slices = get_entry(line)
            if 'relativeOrbitNumber type="start"' in line:
                relativeorbit = get_entry(line)
            if 'relativeOrbitNumber type="stop"' in line:
                lastrelativeorbit = get_entry(line)
            if 'safe:nssdcIdentifier' in line:
                platformidentifier = get_entry(line)
            if 's1sarl1:missionDataTakeID' in line:
                missiondatatakeid = get_entry(line)
            if 's1sarl1:mode' in line:
                sensoroperationalmode = get_entry(line)
            if 'orbitNumber type="start"' in line:
                orbitnumber = get_entry(line)
            if 'orbitNumber type="stop"' in line:
                lastorbitnumber = get_entry(line)
            if 'safe:startTime' in line:
                beginposition = get_entry(line)
            if 'safe:stopTime' in line:
                endposition = get_entry(line)
            if '1sarl1:productType' in line:
                product_type = get_entry(line)

    # add acquisitiondate, derived from the scene's start time
    acqdate = dt.strptime(beginposition, '%Y-%m-%dT%H:%M:%S.%f').strftime('%Y%m%d')

    return (
        slicenumber, total_slices,
        relativeorbit, lastrelativeorbit,
        platformidentifier, missiondatatakeid,
        sensoroperationalmode, product_type,
        orbitnumber, lastorbitnumber,
        beginposition, endposition, acqdate,
        0  # placeholder for size
    )

ost/helpers/raster.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def polygonize_ls(infile, outfile, driver="GeoJSON"):
4242
outfile,
4343
"w",
4444
driver=driver,
45-
crs=pyproj.Proj(src.crs).srs,
45+
crs=src.crs,
4646
schema={"properties": [("raster_val", "int")], "geometry": "Polygon"},
4747
) as dst:
4848
dst.writerecords(results)
@@ -105,7 +105,7 @@ def polygonize_bounds(infile, outfile, mask_value=1, driver="GeoJSON"):
105105
outfile,
106106
"w",
107107
driver=driver,
108-
crs=pyproj.Proj(src.crs).srs,
108+
crs=src.crs,
109109
schema={"properties": [("raster_val", "int")], "geometry": "MultiPolygon"},
110110
) as dst:
111111
dst.writerecords(results)

ost/s1/burst_batch.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@ def bursts_to_ards(burst_gdf, config_file):
6666

6767
logger.info("Preparing the processing pipeline. This may take a moment.")
6868
proc_inventory = prepare_burst_inventory(burst_gdf, config_file)
69+
#print(proc_inventory)
6970

7071
with open(config_file, "r") as file:
7172
config_dict = json.load(file)

0 commit comments

Comments
 (0)