
Commit a8c1831

use names over IDs and change all sorts of table heading formats etc.
1 parent d5b45b7 commit a8c1831

File tree

astroquery/alma/core.py
astroquery/alma/tests/test_alma.py
astroquery/alma/tests/test_alma_remote.py

3 files changed: 69 additions & 68 deletions

astroquery/alma/core.py

Lines changed: 46 additions & 45 deletions
@@ -98,7 +98,7 @@ def query_region_async(self, coordinate, radius, cache=True, public=True,
 
         if payload is None:
             payload = {}
-        payload.update({'raDecCoordinates': rdc})
+        payload.update({'ra_dec': rdc})
 
         return self.query_async(payload, cache=cache, public=public,
                                 science=science, **kwargs)
@@ -381,7 +381,7 @@ def _parse_result(self, response, verbose=False):
         tf = six.BytesIO(response.content)
         vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
         first_table = vo_tree.get_first_table()
-        table = first_table.to_table()
+        table = first_table.to_table(use_names_over_ids=True)
         return table
 
     def _login(self, username, store_password=False):
@@ -714,24 +714,26 @@ def _parse_staging_request_page(self, data_list_page):
 
         root = BeautifulSoup(data_list_page.content, 'html5lib')
 
-        for link in root.findAll('a'):
-            if 'script.sh' in link.text:
-                download_script_url = urljoin(self.dataarchive_url,
-                                              link['href'])
-
-        download_script = self._request('GET', download_script_url,
-                                        cache=False)
-        download_script_target_urls = []
-        for line in download_script.text.split('\n'):
-            if line and line.split() and line.split()[0] == 'wget':
-                download_script_target_urls.append(line.split()[1].strip('"'))
-
-        if len(download_script_target_urls) == 0:
-            raise RemoteServiceError("There was an error parsing the download "
-                                     "script; it is empty. "
-                                     "You can access the download script "
-                                     "directly from this URL: "
-                                     "{0}".format(download_script_url))
+        #for link in root.findAll('a'):
+        #    if 'script.sh' in link.text:
+        #        download_script_url = urljoin(self.dataarchive_url,
+        #                                      link['href'])
+        #if 'download_script_url' not in locals():
+        #    raise RemoteServiceError("No download links were found.")
+
+        #download_script = self._request('GET', download_script_url,
+        #                                cache=False)
+        #download_script_target_urls = []
+        #for line in download_script.text.split('\n'):
+        #    if line and line.split() and line.split()[0] == 'wget':
+        #        download_script_target_urls.append(line.split()[1].strip('"'))
+
+        #if len(download_script_target_urls) == 0:
+        #    raise RemoteServiceError("There was an error parsing the download "
+        #                             "script; it is empty. "
+        #                             "You can access the download script "
+        #                             "directly from this URL: "
+        #                             "{0}".format(download_script_url))
 
         data_table = root.findAll('table', class_='list', id='report')[0]
         columns = {'uid':[], 'URL':[], 'size':[]}
@@ -799,32 +801,31 @@ def _parse_staging_request_page(self, data_list_page):
 
         if len(columns['uid']) == 0:
             raise RemoteServiceError("No valid UIDs were found in the staged "
-                                     "data table. Please include {0} and {1}"
+                                     "data table. Please include {0} "
                                      "in a bug report."
-                                     .format(self._staging_log['data_list_url'],
-                                             download_script_url))
-
-        if len(download_script_target_urls) != len(columns['URL']):
-            log.warn("There was an error parsing the data staging page. "
-                     "The results from the page and the download script "
-                     "differ. You can access the download script directly "
-                     "from this URL: {0}".format(download_script_url))
-        else:
-            bad_urls = []
-            for (rurl,url) in (zip(columns['URL'],
-                                   download_script_target_urls)):
-                if rurl == 'None_Found':
-                    url_uid = os.path.split(url)[-1]
-                    ind = np.where(np.array(columns['uid']) == url_uid)[0][0]
-                    columns['URL'][ind] = url
-                elif rurl != url:
-                    bad_urls.append((rurl, url))
-            if bad_urls:
-                log.warn("There were mismatches between the parsed URLs "
-                         "from the staging page ({0}) and the download "
-                         "script ({1})."
-                         .format(self._staging_log['data_list_url'],
-                                 download_script_url))
+                                     .format(self._staging_log['data_list_url']))
+
+        #if len(download_script_target_urls) != len(columns['URL']):
+        #    log.warn("There was an error parsing the data staging page. "
+        #             "The results from the page and the download script "
+        #             "differ. You can access the download script directly "
+        #             "from this URL: {0}".format(download_script_url))
+        #else:
+        #    bad_urls = []
+        #    for (rurl,url) in (zip(columns['URL'],
+        #                           download_script_target_urls)):
+        #        if rurl == 'None_Found':
+        #            url_uid = os.path.split(url)[-1]
+        #            ind = np.where(np.array(columns['uid']) == url_uid)[0][0]
+        #            columns['URL'][ind] = url
+        #        elif rurl != url:
+        #            bad_urls.append((rurl, url))
+        #    if bad_urls:
+        #        log.warn("There were mismatches between the parsed URLs "
+        #                 "from the staging page ({0}) and the download "
+        #                 "script ({1})."
+        #                 .format(self._staging_log['data_list_url'],
+        #                         download_script_url))
 
         tbl = Table([Column(name=k, data=v) for k,v in iteritems(columns)])
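
For reference, a minimal standalone sketch of the changed parsing path (the wrapper function and the use of io.BytesIO in place of six.BytesIO are illustrative; only the astropy.io.votable calls mirror the diff above). With use_names_over_ids=True the resulting table takes its column names from the VOTable FIELD name attributes (e.g. 'Project code') rather than the FIELD IDs (e.g. 'Project_code'), which is what the test updates below reflect.

import io

from astropy.io import votable


def parse_votable_bytes(content):
    # Parse raw VOTable bytes (e.g. a response body from the ALMA archive).
    tf = io.BytesIO(content)
    vo_tree = votable.parse(tf, pedantic=False, invalid='mask')
    first_table = vo_tree.get_first_table()
    # use_names_over_ids=True: column names come from the FIELD 'name'
    # attribute ('Project code') instead of the FIELD 'ID' ('Project_code').
    return first_table.to_table(use_names_over_ids=True)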

astroquery/alma/tests/test_alma.py

Lines changed: 3 additions & 3 deletions
@@ -92,7 +92,7 @@ def test_SgrAstar(monkeypatch):
 
     # test that max_results = 50
     assert len(result) == 82
-    assert b'2011.0.00217.S' in result['Project_code']
+    assert b'2011.0.00217.S' in result['Project code']
 
 def test_staging(monkeypatch):
 
@@ -105,10 +105,10 @@ def test_staging(monkeypatch):
     target = 'NGC4945'
     project_code = '2011.0.00121.S'
     payload = {'project_code':project_code,
-               'source_name':target,}
+               'source_name_resolver':target,}
     result = alma.query(payload=payload)
 
-    uid_url_table = alma.stage_data(result['Asdm_uid'])
+    uid_url_table = alma.stage_data(result['Asdm uid'])
     assert len(uid_url_table) == 2
 
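
The corresponding user-facing pattern, sketched from the updated test_staging above (it queries the live archive; the 'project_code' and 'source_name_resolver' payload keys and the spaced 'Asdm uid' column label are taken from the test code, not independently verified):

from astroquery.alma import Alma

alma = Alma()

# Query by project code and a resolvable source name, as in test_staging.
payload = {'project_code': '2011.0.00121.S',
           'source_name_resolver': 'NGC4945'}
result = alma.query(payload=payload)

# Column labels now come from the VOTable field names, which contain spaces.
uid_url_table = alma.stage_data(result['Asdm uid'])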

astroquery/alma/tests/test_alma_remote.py

Lines changed: 20 additions & 20 deletions
@@ -25,19 +25,19 @@ def test_SgrAstar(self, temp_dir):
         alma.cache_location = temp_dir
 
         result_s = alma.query_object('Sgr A*')
-        assert b'2011.0.00217.S' in result_s['Project_code']
+        assert b'2011.0.00217.S' in result_s['Project code']
         c = coordinates.SkyCoord(266.41681662*u.deg, -29.00782497*u.deg,
                                  frame='fk5')
         result_c = alma.query_region(c, 1*u.deg)
-        assert b'2011.0.00217.S' in result_c['Project_code']
+        assert b'2011.0.00217.S' in result_c['Project code']
 
     def test_stage_data(self, temp_dir):
         alma = Alma()
         alma.cache_location = temp_dir
 
         result_s = alma.query_object('Sgr A*')
-        assert b'2011.0.00217.S' in result_s['Project_code']
-        uid = result_s['Asdm_uid'][0]
+        assert b'2011.0.00217.S' in result_s['Project code']
+        uid = result_s['Asdm uid'][0]
 
         alma.stage_data([uid])
 
@@ -47,24 +47,24 @@ def test_doc_example(self, temp_dir):
         alma2 = Alma()
         alma2.cache_location = temp_dir
         m83_data = alma.query_object('M83')
-        assert m83_data.colnames == ['Project_code', 'Source_name', 'RA',
-                                     'Dec', 'Band', 'Frequency_resolution',
-                                     'Integration', 'Release_date',
-                                     'Frequency_support',
-                                     'Velocity_resolution', 'Pol_products',
-                                     'Observation_date', 'PI_name', 'PWV',
-                                     'Member_ous_id', 'Asdm_uid',
-                                     'Project_title', 'Project_type',
-                                     'Scan_intent']
+        assert m83_data.colnames == ['Project code', 'Source name', 'RA',
+                                     'Dec', 'Band', 'Frequency resolution',
+                                     'Integration', 'Release date',
+                                     'Frequency support',
+                                     'Velocity resolution', 'Pol products',
+                                     'Observation date', 'PI name', 'PWV',
+                                     'Member ous id', 'Asdm uid',
+                                     'Project title', 'Project type',
+                                     'Scan intent']
         galactic_center = coordinates.SkyCoord(0*u.deg, 0*u.deg,
                                                frame='galactic')
         gc_data = alma.query_region(galactic_center, 1*u.deg)
 
-        uids = np.unique(m83_data['Asdm_uid'])
+        uids = np.unique(m83_data['Asdm uid'])
         assert b'uid://A002/X3b3400/X90f' in uids
-        X90f = (m83_data['Asdm_uid'] == b'uid://A002/X3b3400/X90f')
+        X90f = (m83_data['Asdm uid'] == b'uid://A002/X3b3400/X90f')
         assert X90f.sum() == 45
-        X31 = (m83_data['Member_ous_id'] == b'uid://A002/X3216af/X31')
+        X31 = (m83_data['Member ous id'] == b'uid://A002/X3216af/X31')
         assert X31.sum() == 225
 
         link_list_asdm = alma.stage_data('uid://A002/X3b3400/X90f')
@@ -100,8 +100,8 @@ def test_cycle1(self, temp_dir):
         assert len(result) == 1
 
         # Need new Alma() instances each time
-        uid_url_table_mous = alma().stage_data(result['Member_ous_id'])
-        uid_url_table_asdm = alma().stage_data(result['Asdm_uid'])
+        uid_url_table_mous = alma().stage_data(result['Member ous id'])
+        uid_url_table_asdm = alma().stage_data(result['Asdm uid'])
         # I believe the fixes as part of #495 have resulted in removal of a
         # redundancy in the table creation, so a 1-row table is OK here.
         # A 2-row table may not be OK any more, but that's what it used to
@@ -134,8 +134,8 @@ def test_cycle0(self, temp_dir):
 
         alma1 = alma()
         alma2 = alma()
-        uid_url_table_mous = alma1.stage_data(result['Member_ous_id'])
-        uid_url_table_asdm = alma2.stage_data(result['Asdm_uid'])
+        uid_url_table_mous = alma1.stage_data(result['Member ous id'])
+        uid_url_table_asdm = alma2.stage_data(result['Asdm uid'])
         assert len(uid_url_table_asdm) == 1
         assert len(uid_url_table_mous) == 32
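
Since the rename changes every column label that downstream scripts may index by, here is a hypothetical compatibility shim (it is not part of astroquery; the old-to-new mapping covers only the columns that appear in the tests above):

# Map old underscore-style column names to the new space-separated names,
# as asserted in test_doc_example.
OLD_TO_NEW = {
    'Project_code': 'Project code',
    'Source_name': 'Source name',
    'Asdm_uid': 'Asdm uid',
    'Member_ous_id': 'Member ous id',
}


def get_column(table, name):
    # Return the requested column, accepting either naming convention so
    # callers keep working across astroquery versions.
    if name in table.colnames:
        return table[name]
    return table[OLD_TO_NEW.get(name, name)]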
