 import string
 import requests
 import warnings
+
 from pkg_resources import resource_filename
 from bs4 import BeautifulSoup
 import pyvo
-
 from urllib.parse import urljoin
+
 from astropy.table import Table, Column, vstack
 from astroquery import log
-from astropy.utils import deprecated
 from astropy.utils.console import ProgressBar
-from astropy.utils.exceptions import AstropyDeprecationWarning
 from astropy import units as u
 from astropy.time import Time
 from pyvo.dal.sia2 import SIA_PARAMETERS_DESC
@@ -236,7 +235,7 @@ def tap(self):
         self._tap = pyvo.dal.tap.TAPService(baseurl=self.tap_url)
         return self._tap
 
-    def query_object_async(self, object_name, cache=None, public=True,
+    def query_object_async(self, object_name, *, public=True,
                            science=True, payload=None, **kwargs):
         """
         Query the archive for a source name.
@@ -245,7 +244,6 @@ def query_object_async(self, object_name, cache=None, public=True,
         ----------
         object_name : str
             The object name. Will be resolved by astropy.coord.SkyCoord
-        cache : deprecated
         public : bool
             True to return only public datasets, False to return private only,
             None to return both
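
With `cache` gone and the remaining options keyword-only, options after the object name now have to be spelled out at the call site. A minimal usage sketch of the new call style (the target name is illustrative, not from this patch):

```python
from astroquery.alma import Alma

# Options after the object name must be passed by keyword.
results = Alma.query_object_async('M83', public=True, science=True)
print(results.colnames)
```
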
@@ -262,7 +260,7 @@ def query_object_async(self, object_name, cache=None, public=True,
         return self.query_async(public=public, science=science,
                                 payload=payload, **kwargs)
 
-    def query_region_async(self, coordinate, radius, cache=None, public=True,
+    def query_region_async(self, coordinate, radius, *, public=True,
                            science=True, payload=None, **kwargs):
         """
         Query the ALMA archive with a source name and radius
@@ -273,8 +271,6 @@ def query_region_async(self, coordinate, radius, cache=None, public=True,
             the identifier or coordinates around which to query.
         radius : str / `~astropy.units.Quantity`, optional
             the radius of the region
-        cache : Deprecated
-            Cache the query?
         public : bool
             True to return only public datasets, False to return private only,
             None to return both
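
The same pattern applies here: `coordinate` and `radius` stay positional, everything after them becomes keyword-only. A sketch under that assumption, with made-up coordinates and radius:

```python
from astropy import units as u
from astropy.coordinates import SkyCoord
from astroquery.alma import Alma

center = SkyCoord(266.41683 * u.deg, -29.00781 * u.deg)  # example position
# public/science (and any extra payload constraints) go in as keywords.
results = Alma.query_region_async(center, 1 * u.arcmin, public=True, science=True)
```
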
@@ -299,18 +295,15 @@ def query_region_async(self, coordinate, radius, cache=None, public=True,
         return self.query_async(public=public, science=science,
                                 payload=payload, **kwargs)
 
-    def query_async(self, payload, cache=None, public=True, science=True,
-                    legacy_columns=False, max_retries=None,
-                    get_html_version=None,
-                    get_query_payload=None, **kwargs):
+    def query_async(self, payload, *, public=True, science=True,
+                    legacy_columns=False, get_query_payload=None, **kwargs):
         """
         Perform a generic query with user-specified payload
 
         Parameters
         ----------
         payload : dictionary
             Please consult the `help` method
-        cache : deprecated
         public : bool
             True to return only public datasets, False to return private only,
             None to return both
@@ -327,17 +320,6 @@ def query_async(self, payload, cache=None, public=True, science=True,
             Table with results. Columns are those in the ALMA ObsCore model
             (see ``help_tap``) unless ``legacy_columns`` argument is set to True.
         """
-        local_args = dict(locals().items())
-
-        for arg in local_args:
-            # check if the deprecated attributes have been used
-            for dep in ['cache', 'max_retries', 'get_html_version']:
-                if arg[0] == dep and arg[1] is not None:
-                    warnings.warn(
-                        ("Argument '{}' has been deprecated "
-                         "since version 4.0.1 and will be ignored".format(arg[0])),
-                        AstropyDeprecationWarning)
-                    del kwargs[arg[0]]
 
         if payload is None:
             payload = {}
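
With the deprecated-argument filter gone, `query_async` no longer intercepts `cache`, `max_retries`, or `get_html_version`; only the documented keyword options remain. A minimal sketch reusing the payload from the `help` example further down in this file:

```python
from astroquery.alma import Alma

# Payload keys are ALMA ObsCore/TAP fields (see Alma.help_tap()).
payload = dict(project_code='2017.1.01355.L', source_name_alma='G008.67')
results = Alma.query_async(payload, public=True, science=True, legacy_columns=False)
```
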
@@ -385,7 +367,7 @@ def query_async(self, payload, cache=None, public=True, science=True,
             return legacy_result
         return result
 
-    def query_sia(self, pos=None, band=None, time=None, pol=None,
+    def query_sia(self, *, pos=None, band=None, time=None, pol=None,
                   field_of_view=None, spatial_resolution=None,
                   spectral_resolving_power=None, exptime=None,
                   timeres=None, publisher_did=None,
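
All SIAv2 constraints become keyword-only, which matches how the underlying pyvo SIA2 service is normally called. A sketch assuming pyvo's `(ra, dec, radius)` convention in degrees for `pos`:

```python
from astroquery.alma import Alma

# Every constraint is named; pos is (ra, dec, radius) in degrees here.
sia_results = Alma.query_sia(pos=(266.41683, -29.00781, 0.01))
```
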
@@ -500,54 +482,7 @@ def _get_dataarchive_url(self):
                                  "on github.")
         return self.dataarchive_url
 
-    @deprecated(since="v0.4.1", alternative="get_data_info")
-    def stage_data(self, uids, expand_tarfiles=False, return_json=False):
-        """
-        Obtain table of ALMA files
-
-        DEPRECATED: Data is no longer staged. This method is deprecated and
-        kept here for backwards compatibility reasons but it's not fully
-        compatible with the original implementation.
-
-        Parameters
-        ----------
-        uids : list or str
-            A list of valid UIDs or a single UID.
-            UIDs should have the form: 'uid://A002/X391d0b/X7b'
-        expand_tarfiles : DEPRECATED
-        return_json : DEPRECATED
-            Note: The returned astropy table can be easily converted to json
-            through pandas:
-                output = StringIO()
-                stage_data(...).to_pandas().to_json(output)
-                table_json = output.getvalue()
-
-        Returns
-        -------
-        data_file_table : Table
-            A table containing 3 columns: the UID, the file URL (for future
-            downloading), and the file size
-        """
-
-        if return_json:
-            raise AttributeError(
-                'return_json is deprecated. See method docs for a workaround')
-        table = Table()
-        res = self.get_data_info(uids, expand_tarfiles=expand_tarfiles)
-        p = re.compile(r'.*(uid__.*)\.asdm.*')
-        if res:
-            table['name'] = [u.split('/')[-1] for u in res['access_url']]
-            table['id'] = [p.search(x).group(1) if 'asdm' in x else 'None'
-                           for x in table['name']]
-            table['type'] = res['content_type']
-            table['size'] = res['content_length']
-            table['permission'] = ['UNKNOWN'] * len(res)
-            table['mous_uid'] = [uids] * len(res)
-            table['URL'] = res['access_url']
-            table['isProprietary'] = res['readable']
-        return table
-
-    def get_data_info(self, uids, expand_tarfiles=False,
+    def get_data_info(self, uids, *, expand_tarfiles=False,
                       with_auxiliary=True, with_rawdata=True):
 
         """
@@ -685,7 +620,7 @@ def _HEADER_data_size(self, files):
 
         return data_sizes, totalsize.to(u.GB)
 
-    def download_files(self, files, savedir=None, cache=True,
+    def download_files(self, files, *, savedir=None, cache=True,
                        continuation=True, skip_unauthorized=True,
                        verify_only=False):
         """
@@ -821,7 +756,7 @@ def _parse_result(self, response, verbose=False):
 
         return response
 
-    def retrieve_data_from_uid(self, uids, cache=True):
+    def retrieve_data_from_uid(self, uids, *, cache=True):
         """
         Stage & Download ALMA data. Will print out the expected file size
         before attempting the download.
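
For code that previously went through the now-deleted `stage_data`, the equivalent flow is `get_data_info` followed by `download_files`, which is roughly what `retrieve_data_from_uid` does internally. A sketch; the UID is the example from the removed docstring and the save directory is made up:

```python
from io import StringIO

from astroquery.alma import Alma

# List the files behind a UID, then fetch them; options are keyword-only.
info = Alma.get_data_info('uid://A002/X391d0b/X7b', expand_tarfiles=False)
downloaded = Alma.download_files(info['access_url'], savedir='/tmp/alma', cache=True)

# JSON form of the listing, the pandas workaround the old stage_data docstring
# suggested in place of return_json=True.
output = StringIO()
info.to_pandas().to_json(output)
table_json = output.getvalue()
```
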
@@ -854,7 +789,7 @@ def retrieve_data_from_uid(self, uids, cache=True):
         downloaded_files = self.download_files(file_urls)
         return downloaded_files
 
-    def _get_auth_info(self, username, store_password=False,
+    def _get_auth_info(self, username, *, store_password=False,
                        reenter_password=False):
         """
         Get the auth info (user, password) for use in another function
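
`_get_auth_info` is private, so this change matters mostly for the public `login` path that sits on top of it. A minimal sketch with a placeholder username; `store_password` is assumed here to be the usual astroquery login option:

```python
from astroquery.alma import Alma

# Prompts for the password interactively; credentials can optionally be stored.
Alma.login('my_alma_username', store_password=False)
```
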
@@ -1032,7 +967,7 @@ def cycle0_table(self):
             self._cycle0_table.rename_column('col2', 'uid')
         return self._cycle0_table
 
-    def get_files_from_tarballs(self, downloaded_files, regex=r'.*\.fits$',
+    def get_files_from_tarballs(self, downloaded_files, *, regex=r'.*\.fits$',
                                 path='cache_path', verbose=True):
         """
         Given a list of successfully downloaded tarballs, extract files
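
A sketch of the keyword-only extraction helpers, including `download_and_extract_files` from the next hunk; the UID and directories are illustrative:

```python
from astroquery.alma import Alma

info = Alma.get_data_info('uid://A002/X391d0b/X7b')
tar_urls = [url for url in info['access_url'] if url.endswith('.tar')]

# Two-step version: download the tarballs, then pull out matching FITS files.
tarballs = Alma.download_files(tar_urls, savedir='/tmp/alma')
fits_files = Alma.get_files_from_tarballs(tarballs, regex=r'.*\.fits$', path='/tmp/alma')

# One-step version; everything after `urls` is keyword-only as well.
fits_files = Alma.download_and_extract_files(tar_urls, delete=True, regex=r'.*\.fits$')
```
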
@@ -1082,7 +1017,7 @@ def get_files_from_tarballs(self, downloaded_files, regex=r'.*\.fits$',
 
         return filelist
 
-    def download_and_extract_files(self, urls, delete=True, regex=r'.*\.fits$',
+    def download_and_extract_files(self, urls, *, delete=True, regex=r'.*\.fits$',
                                    include_asdm=False, path='cache_path',
                                    verbose=True):
         """
@@ -1196,53 +1131,7 @@ def help(self, cache=True):
         print("Alma.query(payload=dict(project_code='2017.1.01355.L', "
               "source_name_alma='G008.67'))")
 
-    def _json_summary_to_table(self, data, base_url):
-        """
-        Special tool to convert some JSON metadata to a table Obsolete as of
-        March 2020 - should be removed along with stage_data_prefeb2020
-        """
-        from ..utils import url_helpers
-        columns = {'mous_uid': [], 'URL': [], 'size': []}
-        for entry in data['node_data']:
-            # de_type can be useful (e.g., MOUS), but it is not necessarily
-            # specified
-            # file_name and file_key *must* be specified.
-            is_file = \
-                (entry['file_name'] != 'null' and entry['file_key'] != 'null')
-            if is_file:
-                # "de_name": "ALMA+uid://A001/X122/X35e",
-                columns['mous_uid'].append(entry['de_name'][5:])
-                if entry['file_size'] == 'null':
-                    columns['size'].append(np.nan * u.Gbyte)
-                else:
-                    columns['size'].append(
-                        (int(entry['file_size']) * u.B).to(u.Gbyte))
-                # example template for constructing url:
-                # https://almascience.eso.org/dataPortal/requests/keflavich/940238268/ALMA/
-                # uid___A002_X9d6f4c_X154/2013.1.00546.S_uid___A002_X9d6f4c_X154.asdm.sdm.tar
-                # above is WRONG... except for ASDMs, when it's right
-                # should be:
-                # 2013.1.00546.S_uid___A002_X9d6f4c_X154.asdm.sdm.tar/2013.1.00546.S_uid___A002_X9d6f4c_X154.asdm.sdm.tar
-                #
-                # apparently ASDMs are different from others:
-                # templates:
-                # https://almascience.eso.org/dataPortal/requests/keflavich/946895898/ALMA/
-                # 2013.1.00308.S_uid___A001_X196_X93_001_of_001.tar/2013.1.00308.S_uid___A001_X196_X93_001_of_001.tar
-                # uid___A002_X9ee74a_X26f0/2013.1.00308.S_uid___A002_X9ee74a_X26f0.asdm.sdm.tar
-                url = url_helpers.join(base_url,
-                                       entry['file_key'],
-                                       entry['file_name'])
-                if 'null' in url:
-                    raise ValueError("The URL {0} was created containing "
-                                     "'null', which is invalid.".format(url))
-                columns['URL'].append(url)
-
-        columns['size'] = u.Quantity(columns['size'], u.Gbyte)
-
-        tbl = Table([Column(name=k, data=v) for k, v in columns.items()])
-        return tbl
-
-    def get_project_metadata(self, projectid, cache=True):
+    def get_project_metadata(self, projectid, *, cache=True):
         """
         Get the metadata - specifically, the project abstract - for a given project ID.
         """
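
Finally, a sketch of the keyword-only `get_project_metadata` call, reusing the project code from the `help` example:

```python
from astroquery.alma import Alma

# `cache` must now be named explicitly.
metadata = Alma.get_project_metadata('2017.1.01355.L', cache=True)
```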