Commit 86bce43

Alvaro Arroyo Parejo committed: adding verification as environment variable for CI/CD pipeline
1 parent fcb7b4d commit 86bce43

File tree (5 files changed: +47 additions, -24 deletions)

  ESANEOCC/__init__.py
  ESANEOCC/lists.py
  ESANEOCC/tabs.py
  test/test_NEOCC.py
  test/test_neocc_remote.py


ESANEOCC/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -30,6 +30,9 @@ class Conf(_config.ConfigNamespace):
 
     TIMEOUT = 60
 
+
+    VERIFICATION = os.getenv('VERIFICATION', default=True)
+
 conf = Conf()
 
 from .core import neocc, ESAneoccClass
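Worth noting when wiring this flag through a pipeline: os.getenv returns a string whenever the variable is set, so VERIFICATION=False in the job environment arrives as the truthy string 'False', and requests treats a string verify value as a path to a CA bundle. Below is a minimal sketch of one way the value could be coerced to a boolean before use; the _str_to_bool helper is illustrative and not part of this commit:

    import os


    def _str_to_bool(value):
        """Interpret common string spellings of a boolean flag (illustrative helper)."""
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() not in ('false', '0', 'no', 'off')


    # os.getenv returns a string when VERIFICATION is set in the environment,
    # so e.g. VERIFICATION=False arrives here as the string 'False'.
    VERIFICATION = _str_to_bool(os.getenv('VERIFICATION', default=True))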

ESANEOCC/lists.py

Lines changed: 3 additions & 1 deletion
@@ -45,6 +45,7 @@
 # Import BASE URL and TIMEOUT
 API_URL = conf.API_URL
 TIMEOUT = conf.TIMEOUT
+VERIFICATION = conf.VERIFICATION
 
 
 def get_list_url(list_name):
@@ -115,7 +116,8 @@ def get_list_data(url, list_name):
         Data frame which contains the data of the requested list.
     """
     # Get data from URL
-    data_list = requests.get(API_URL + url, timeout=TIMEOUT).content
+    data_list = requests.get(API_URL + url, timeout=TIMEOUT,
+                             verify=VERIFICATION).content
 
     # Decode the data using UTF-8
     data_list_d = io.StringIO(data_list.decode('utf-8'))
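For reference, the verify keyword of requests.get accepts either a boolean or a path to a CA bundle; passing verify=False skips TLS certificate checking and makes urllib3 emit an InsecureRequestWarning. A small self-contained sketch of the same call pattern used in get_list_data; API_URL, the 'nea_list' endpoint and the values below are placeholders, not the package's configuration:

    import io

    import requests

    API_URL = 'https://example.org/api/'   # placeholder for conf.API_URL
    TIMEOUT = 60
    VERIFICATION = False                   # e.g. a CI runner behind a TLS-intercepting proxy

    # Same shape as the call in get_list_data: fetch the raw bytes, then
    # decode them into a text stream for the parsers downstream.
    data_list = requests.get(API_URL + 'nea_list', timeout=TIMEOUT,
                             verify=VERIFICATION).content
    data_list_d = io.StringIO(data_list.decode('utf-8'))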

ESANEOCC/tabs.py

Lines changed: 9 additions & 4 deletions
@@ -70,6 +70,7 @@
 EPHEM_URL = conf.EPHEM_URL
 SUMMARY_URL = conf.SUMMARY_URL
 TIMEOUT = conf.TIMEOUT
+VERIFICATION = conf.VERIFICATION
 
 def get_object_url(name, tab, **kwargs):
     """Get url from requested object and tab name.
@@ -162,7 +163,8 @@ def get_object_data(url):
         Object in byte format.
     """
     # Get data from URL
-    data_obj = requests.get(API_URL + url, timeout=TIMEOUT).content
+    data_obj = requests.get(API_URL + url, timeout=TIMEOUT,
+                            verify=VERIFICATION).content
     # Parse data and assign attributes to object
 
     return data_obj
@@ -1881,7 +1883,8 @@ def _ephem_parser(self, name, observatory, start, stop, step, step_unit):
         # Request data two times if the first attempt fails
         try:
             # Get object data
-            data_obj = requests.get(url_ephe, timeout=TIMEOUT).content
+            data_obj = requests.get(url_ephe, timeout=TIMEOUT,
+                                    verify=VERIFICATION).content
 
         except ConnectionError:  # pragma: no cover
             print('Initial attempt to obtain object data failed. '
@@ -1891,7 +1894,8 @@ def _ephem_parser(self, name, observatory, start, stop, step, step_unit):
             # Wait 5 seconds
             time.sleep(5)
             # Get object data
-            data_obj = requests.get(url_ephe, timeout=TIMEOUT).content
+            data_obj = requests.get(url_ephe, timeout=TIMEOUT,
+                                    verify=VERIFICATION).content
 
         # Check if file contains errors due to bad URL keys
         check = io.StringIO(data_obj.decode('utf-8'))
@@ -2018,7 +2022,8 @@ def _summary_parser(self, name):
         url = SUMMARY_URL + str(name).replace(' ', '%20')
 
         # Read the url as html
-        contents = requests.get(url, timeout=TIMEOUT).content
+        contents = requests.get(url, timeout=TIMEOUT,
+                                verify=VERIFICATION).content
         # Parse html using BS
         parsed_html = BeautifulSoup(contents, 'lxml')
         # Summary properties are in </div>. Search for them:
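The two _ephem_parser hunks follow a retry-once pattern: try the request, and on a ConnectionError wait five seconds and try again, with verify= now threaded through both attempts. A condensed sketch of that pattern under the same assumptions (module-level TIMEOUT and VERIFICATION); fetch_with_retry is an illustrative name, not a function in tabs.py:

    import time

    import requests
    from requests.exceptions import ConnectionError

    TIMEOUT = 60
    VERIFICATION = True


    def fetch_with_retry(url):
        """Request the URL, retrying once after 5 s on a connection failure."""
        try:
            return requests.get(url, timeout=TIMEOUT, verify=VERIFICATION).content
        except ConnectionError:
            print('Initial attempt to obtain object data failed. Retrying...')
            time.sleep(5)
            # Second and final attempt; any failure here propagates to the caller.
            return requests.get(url, timeout=TIMEOUT, verify=VERIFICATION).content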

test/test_NEOCC.py

Lines changed: 3 additions & 1 deletion
@@ -35,6 +35,8 @@
 # Import BASE URL and TIMEOUT
 API_URL = conf.API_URL
 DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
+TIMEOUT = conf.TIMEOUT
+VERIFICATION = conf.VERIFICATION
 
 # Disable warning in pylint related to monkeypath functions
 # pylint: disable=W0613, W0621
@@ -81,7 +83,7 @@ def patch_get(request):
     return monkey_p
 
 
-def get_mockreturn(name, timeout=10):
+def get_mockreturn(name, timeout=TIMEOUT, verify=VERIFICATION):
     """Define a function to return the appropriate data stored in the
     data/ directory as a readable object within the MockResponse class.
     """

test/test_neocc_remote.py

Lines changed: 29 additions & 18 deletions
@@ -28,6 +28,8 @@
 # Import BASE URL and TIMEOUT
 API_URL = conf.API_URL
 DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
+TIMEOUT = conf.TIMEOUT
+VERIFICATION = conf.VERIFICATION
 
 class TestLists:
     """Class which contains the unitary tests for lists module.
@@ -98,8 +100,9 @@ def test_parse_list(self):
         url_series = ["nea_list", "updated_nea", "monthly_update"]
         for url in url_series:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
 
@@ -113,8 +116,9 @@ def test_parse_list(self):
                    "impacted_objects"]
         for url in url_dfs:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
 
@@ -134,8 +138,9 @@ def test_parse_nea(self):
         url_series = ["nea_list", "updated_nea", "monthly_update"]
         for url in url_series:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
             # Parse using parse_nea
@@ -177,8 +182,9 @@ def test_parse_risk(self):
 
         for url in url_risks:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
             # Parse using parse_nea
@@ -230,8 +236,9 @@ def test_parse_clo(self):
 
         for url in url_close:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
             # Parse using parse_nea
@@ -280,8 +287,9 @@ def test_parse_pri(self):
 
         for url in url_priority:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
             # Parse using parse_nea
@@ -322,8 +330,9 @@ def test_parse_encounter(self):
                    'Max Mag']
 
         # Get data from URL
-        data_list = requests.get(API_URL + self.lists_dict[url]).\
-            content
+        data_list = requests.get(API_URL + self.lists_dict[url],
+                                 timeout=TIMEOUT,
+                                 verify=VERIFICATION).content
         # Decode the data using UTF-8
         data_list_d = io.StringIO(data_list.decode('utf-8'))
         # Parse using parse_nea
@@ -360,8 +369,9 @@ def test_parse_impacted(self):
         """
         url = 'impacted_objects'
         # Get data from URL
-        data_list = requests.get(API_URL + self.lists_dict[url]).\
-            content
+        data_list = requests.get(API_URL + self.lists_dict[url],
+                                 timeout=TIMEOUT,
+                                 verify=VERIFICATION).content
         # Decode the data using UTF-8
         data_list_d = io.StringIO(data_list.decode('utf-8'))
         # Parse using parse_nea
@@ -390,8 +400,9 @@ def test_parse_neo_catalogue(self):
                    'non-grav param.']
         for url in url_cat:
             # Get data from URL
-            data_list = requests.get(API_URL + self.lists_dict[url]).\
-                content
+            data_list = requests.get(API_URL + self.lists_dict[url],
+                                     timeout=TIMEOUT,
+                                     verify=VERIFICATION).content
             # Decode the data using UTF-8
             data_list_d = io.StringIO(data_list.decode('utf-8'))
             # Parse using parse_nea
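Because the flag is read at import time in ESANEOCC/__init__.py, a CI/CD job has to set VERIFICATION in the environment before the package is imported. A hedged sketch of what that could look like from Python (for example in a conftest.py-style hook); the value stays a string unless it is coerced as discussed above:

    import os

    # Must happen before the first `import ESANEOCC`, since the module-level
    # os.getenv call in ESANEOCC/__init__.py runs exactly once, at import time.
    os.environ['VERIFICATION'] = 'False'

    from ESANEOCC import conf

    # conf.VERIFICATION is the string 'False' here, not the boolean False.
    print(conf.VERIFICATION)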
