Commit 13087f4

Fixing precommit
2 parents e310716 + 256a1fe

30 files changed, +990 -257 lines changed

.github/workflows/pre-commit.yml

Lines changed: 1 addition & 1 deletion
@@ -11,4 +11,4 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v3
-      - uses: pre-commit/action@v3.0.0
+      - uses: pre-commit/action@v3.0.1

api/evaluator.py

Lines changed: 5 additions & 47 deletions
@@ -292,48 +292,6 @@ def eval_uniqueness(self, id_list, data_or_metadata="(meta)data"):

         return (points, msg_list)

-    def eval_persistency(self, id_list, data_or_metadata="(meta)data"):
-        points = 0
-        msg_list = []
-        for _id in id_list:
-            _points = 0
-            if ut.is_persistent_id(_id):
-                _msg = "Found persistent identifier for the %s: %s" % (
-                    data_or_metadata,
-                    _id,
-                )
-                _points = 100
-                points = 100
-            else:
-                _msg = "Identifier is not persistent for the %s: %s" % (
-                    data_or_metadata,
-                    _id,
-                )
-            msg_list.append({"message": _msg, "points": _points})
-
-        return (points, msg_list)
-
-    def eval_uniqueness(self, id_list, data_or_metadata="(meta)data"):
-        points = 0
-        msg_list = []
-        for _id in id_list:
-            _points = 0
-            if ut.is_unique_id(_id):
-                _msg = "Found a globally unique identifier for the %s: %s" % (
-                    data_or_metadata,
-                    _id,
-                )
-                _points = 100
-                points = 100
-            else:
-                _msg = "Identifier found for the %s is not globally unique: %s" % (
-                    data_or_metadata,
-                    _id,
-                )
-            msg_list.append({"message": _msg, "points": _points})
-
-        return (points, msg_list)
-
     # TESTS
     # FINDABLE
     @ConfigTerms(term_id="identifier_term")
@@ -1388,11 +1346,11 @@ def rda_i2_01m(self, **kwargs):
         term_data = kwargs["terms_cv"]
         term_metadata = term_data["metadata"]

-        for index, e_k in term_metadata.iterrows():
-            tmp_msg, cv = ut.check_controlled_vocabulary(e_k["text_value"])
-            if tmp_msg is not None:
-                logger.debug(_("Found potential vocabulary") + ": %s" % tmp_msg)
-                self.cvs.append(cv)
+        for index, e_k in term_metadata.iterrows():
+            tmp_msg, cv = ut.check_controlled_vocabulary(e_k["text_value"])
+            if tmp_msg is not None:
+                logger.debug(_("Found potential vocabulary") + ": %s" % tmp_msg)
+                self.cvs.append(cv)

         if len(self.cvs) > 0:
             for e in self.cvs:
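
The first hunk in api/evaluator.py above drops duplicated eval_persistency and eval_uniqueness definitions, leaving a single copy of each; both follow the same contract of returning a (points, msg_list) tuple, where any identifier that passes the check scores 100 and every identifier gets its own message. A minimal, self-contained sketch of that contract; is_persistent_id below is a simplified stand-in for the project's ut.is_persistent_id, not the real helper:

def is_persistent_id(pid):
    # Illustrative check only: treat DOI- and Handle-style identifiers as persistent.
    return str(pid).startswith(("10.", "doi:", "hdl:", "https://doi.org/", "https://hdl.handle.net/"))


def eval_persistency(id_list, data_or_metadata="(meta)data"):
    points = 0
    msg_list = []
    for _id in id_list:
        _points = 0
        if is_persistent_id(_id):
            _msg = "Found persistent identifier for the %s: %s" % (data_or_metadata, _id)
            _points = 100
            points = 100  # one passing identifier is enough for full points
        else:
            _msg = "Identifier is not persistent for the %s: %s" % (data_or_metadata, _id)
        msg_list.append({"message": _msg, "points": _points})
    return (points, msg_list)


print(eval_persistency(["https://doi.org/10.1234/example", "internal-id-42"]))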

api/rda.py

Lines changed: 52 additions & 2 deletions
@@ -1,3 +1,4 @@
+import glob
 import importlib
 import logging
 import os
@@ -8,7 +9,7 @@
 from connexion import NoContent

 import api.utils as ut
-from api.evaluator import Evaluator
+from api import evaluator
 from fair import app_dirname, load_config

 logging.basicConfig(
@@ -38,10 +39,12 @@ def wrapper(body, **kwargs):
         # Get the identifiers through a search query
         ids = [item_id]
         # FIXME oai-pmh should be no different
+        downstream_logger = evaluator.logger
         if repo not in ["oai-pmh"]:
             try:
                 logger.debug("Trying to import plugin from plugins.%s.plugin" % (repo))
                 plugin = importlib.import_module("plugins.%s.plugin" % (repo), ".")
+                downstream_logger = plugin.logger
             except Exception as e:
                 logger.error(str(e))
                 return str(e), 400
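
The lines added above pick a "downstream" logger whose output is captured later in this diff: the base evaluator's logger by default, or the plugin module's logger when one can be imported. A short, hedged sketch of that dynamic-import pattern; the repository name and logger names here are illustrative, not the project's actual values:

import importlib
import logging

logger = logging.getLogger(__name__)

repo = "example_repo"  # hypothetical; the real code takes this from the request body
downstream_logger = logging.getLogger("api.evaluator")  # default target

try:
    # Plugins are expected to live under plugins/<repo>/plugin.py
    plugin = importlib.import_module("plugins.%s.plugin" % repo)
    downstream_logger = getattr(plugin, "logger", downstream_logger)
except ModuleNotFoundError:
    logger.warning("No plugin module found for '%s'; keeping the default logger", repo)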
@@ -54,25 +57,72 @@ def wrapper(body, **kwargs):
             logger.error(str(e))
             return str(e), 400

+        # Set handler for evaluator logs
+        evaluator_handler = ut.EvaluatorLogHandler()
+        downstream_logger.addHandler(evaluator_handler)
+
         # Collect FAIR checks per metadata identifier
         result = {}
         exit_code = 200
         for item_id in ids:
             # FIXME oai-pmh should be no different
             if repo in ["oai-pmh"]:
-                eva = Evaluator(item_id, oai_base, lang)
+                eva = evaluator.Evaluator(item_id, oai_base, lang)
             else:
                 eva = plugin.Plugin(item_id, oai_base, lang)
             _result, _exit_code = wrapped_func(body, eva=eva)
+            logger.debug(
+                "Raw result returned for indicator ID '%s': %s" % (item_id, _result)
+            )
             result[item_id] = _result
             if _exit_code != 200:
                 exit_code = _exit_code

+        # Append evaluator logs to the final results
+        result["evaluator_logs"] = evaluator_handler.logs
+        logger.debug("Evaluator logs appended through 'evaluator_logs' property")
+
         return result, exit_code

     return wrapper


+def endpoints(plugin=None, plugins_path="plugins"):
+    plugins_with_endpoint = []
+    links = []
+
+    # Get the list of plugins
+    modules = glob.glob(os.path.join(app_dirname, plugins_path, "*"))
+    plugins_list = [
+        os.path.basename(folder) for folder in modules if os.path.isdir(folder)
+    ]
+
+    # Obtain endpoint from each plugin's config
+    for plug in plugins_list:
+        config = load_config(plugin=plug, fail_if_no_config=False)
+        endpoint = config.get("Generic", "endpoint", fallback="")
+        if not endpoint:
+            logger.debug(
+                "Plugin's config does not contain 'Generic:endpoint' section: %s" % plug
+            )
+            logger.warning(
+                "Could not get (meta)data endpoint from plugin's config: %s " % plug
+            )
+        else:
+            logger.debug("Obtained endpoint for plugin '%s': %s" % (plug, endpoint))
+            links.append(endpoint)
+            plugins_with_endpoint.append(plug)
+    # Create a dict with all the found endpoints
+    enp = dict(zip(plugins_with_endpoint, links))
+    # If the plugin is given then only returns a message
+    if plugin:
+        try:
+            return enp[plugin]
+        except:
+            return (enp, 404)
+    return enp
+
+
 @load_evaluator
 def rda_f1_01m(body, eva):
     try:
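
The new endpoints() helper above scans the plugins directory and reads each plugin's configuration for a Generic:endpoint option, returning either the full name-to-endpoint mapping or a single plugin's endpoint. A standalone sketch of the same lookup using configparser directly; the directory layout and config file name are assumptions for illustration, not the project's load_config behaviour:

import configparser
import glob
import os


def discover_endpoints(plugins_path="plugins", config_name="config.ini"):
    # Map plugin name -> endpoint found in its INI config (sketch).
    endpoints = {}
    for folder in glob.glob(os.path.join(plugins_path, "*")):
        if not os.path.isdir(folder):
            continue
        plug = os.path.basename(folder)
        config = configparser.ConfigParser()
        config.read(os.path.join(folder, config_name))
        endpoint = config.get("Generic", "endpoint", fallback="")
        if endpoint:
            endpoints[plug] = endpoint
    return endpoints


print(discover_endpoints())  # e.g. {'oai-pmh': 'https://example.org/oai'} if such configs exist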

api/utils.py

Lines changed: 53 additions & 14 deletions
@@ -15,6 +15,15 @@
 logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)


+class EvaluatorLogHandler(logging.Handler):
+    def __init__(self, level=logging.DEBUG):
+        self.level = level
+        self.logs = []
+
+    def handle(self, record):
+        self.logs.append("[%s] %s" % (record.levelname, record.msg))
+
+
 def get_doi_str(doi_str):
     doi_to_check = re.findall(
         r"10[\.-]+.[\d\.-]+/[\w\.-]+[\w\.-]+/[\w\.-]+[\w\.-]", doi_str
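
The EvaluatorLogHandler added above buffers formatted records in a list so they can later be attached to the API response. For reference, a minimal self-contained sketch of the same idea written as a conventional logging.Handler subclass (calling super().__init__() and overriding emit()); this illustrates how such a collector plugs into a logger and is not the committed class:

import logging


class InMemoryLogHandler(logging.Handler):
    # Collect log records in memory as "[LEVEL] message" strings (sketch).
    def __init__(self, level=logging.DEBUG):
        super().__init__(level=level)
        self.logs = []

    def emit(self, record):
        self.logs.append("[%s] %s" % (record.levelname, record.getMessage()))


logger = logging.getLogger("evaluator_demo")
logger.setLevel(logging.DEBUG)
handler = InMemoryLogHandler()
logger.addHandler(handler)
logger.debug("Found persistent identifier")
print(handler.logs)  # ['[DEBUG] Found persistent identifier']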
@@ -468,7 +477,7 @@ def oai_check_record_url(oai_base, metadata_prefix, pid):

     url = oai_base + action + params
     logging.debug("Trying: " + url)
-    response = requests.get(url)
+    response = requests.get(url, verify=False)
     error = 0
     for tags in ET.fromstring(response.text).findall(
         ".//{http://www.openarchives.org/OAI/2.0/}error"
@@ -482,7 +491,7 @@ def oai_check_record_url(oai_base, metadata_prefix, pid):

     url = oai_base + action + params
     logging.debug("Trying: " + url)
-    response = requests.get(url)
+    response = requests.get(url, verify=False)
     error = 0
     for tags in ET.fromstring(response.text).findall(
         ".//{http://www.openarchives.org/OAI/2.0/}error"
@@ -499,7 +508,7 @@ def oai_check_record_url(oai_base, metadata_prefix, pid):

     url = oai_base + action + params
     logging.debug("Trying: " + url)
-    response = requests.get(url)
+    response = requests.get(url, verify=False)
     error = 0
     for tags in ET.fromstring(response.text).findall(
         ".//{http://www.openarchives.org/OAI/2.0/}error"
@@ -516,7 +525,7 @@ def oai_check_record_url(oai_base, metadata_prefix, pid):

     url = oai_base + action + params
     logging.debug("Trying: " + url)
-    response = requests.get(url)
+    response = requests.get(url, verify=False)
     error = 0
     for tags in ET.fromstring(response.text).findall(
         ".//{http://www.openarchives.org/OAI/2.0/}error"
@@ -657,6 +666,11 @@ def check_controlled_vocabulary(value):
         if coar_c:
             cv_msg = "COAR - Controlled vocabulary. Data: %s" % coar_msg
             cv = "purl.org/coar"
+    elif "wikidata.org" in value:
+        wikidata_c, wikidata_msg = wikidata_check(value)
+        if wikidata_c:
+            cv_msg = "Wikidata - URI term. Data: %s" % wikidata_msg
+            cv = "wikidata.org/wiki"
     return cv_msg, cv


@@ -688,7 +702,7 @@ def orcid_basic_info(orcid):
     }
     try:
         url = "https://pub.orcid.org/v3.0/" + orcid
-        r = requests.get(url, headers=headers)  # GET with headers
+        r = requests.get(url, verify=False, headers=headers)  # GET with headers
         xmlTree = ET.fromstring(r.text)
         item = xmlTree.findall(
             ".//{http://www.orcid.org/ns/common}assertion-origin-name"
@@ -703,7 +717,7 @@ def orcid_basic_info(orcid):
 def loc_basic_info(loc):
     # Returns the first line of json LD
     headers = {"Accept": "application/json"}  # Type of response accpeted
-    r = requests.get(loc, headers=headers)  # GET with headers
+    r = requests.get(loc, verify=False, headers=headers)  # GET with headers
     output = r.json()
     return output[0]

@@ -715,7 +729,7 @@ def geonames_basic_info(geonames):
     geonames = geonames[0 : geonames.index("/")]
     url = "http://api.geonames.org/get?geonameId=%s&username=frames" % geonames
     headers = {"Accept": "application/json"}  # Type of response accpeted
-    r = requests.get(url, headers=headers)  # GET with headers
+    r = requests.get(url, verify=False, headers=headers)  # GET with headers
     logging.debug("Request genoames: %s" % r.text)
     output = ""
     try:
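
A recurring change in this file is passing verify=False to requests, which disables TLS certificate verification; urllib3 then emits an InsecureRequestWarning on every such call. If that trade-off is intentional, the warning can be silenced explicitly, as in this small sketch (the URL is only an example):

import requests
import urllib3

# Disabling certificate verification is a deliberate trade-off here; silence the
# per-request InsecureRequestWarning so the logs stay readable.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

r = requests.get("https://example.org", verify=False, timeout=10)
print(r.status_code)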
@@ -731,14 +745,24 @@ def coar_check(coar):
     coar = coar[0 : coar.index("/")]
     coar = coar.replace("resource_type", "resource_types")
     url = "https://vocabularies.coar-repositories.org/%s" % coar
-    r = requests.get(url)  # GET with headers
+    r = requests.get(url, verify=False)  # GET with headers
     logging.debug("Request coar: %s" % r.text)
     if r.status_code == 200:
         return True, "purl.org/coar"
     else:
         return False, ""


+def wikidata_check(wikidata):
+    logging.debug("Checking wikidata")
+    r = requests.head(wikidata, verify=False)  # GET with headers
+    logging.debug("Request coar: %s" % r.text)
+    if r.status_code == 200:
+        return True, "wikidata.org/wiki"
+    else:
+        return False, ""
+
+
 def getty_basic_info(loc):
     r = requests.get(loc + ".json")  # GET
     if r.status_code == 200:
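
The new wikidata_check above issues a HEAD request and treats an HTTP 200 response as confirmation that the term resolves. A standalone, hedged sketch of that probe with certificate verification left on and a timeout added; the entity URL is only an example and the function name is not the project's:

import requests


def resolves_via_head(url, timeout=10):
    # Return True if the URL answers a HEAD request with HTTP 200 (sketch).
    try:
        r = requests.head(url, timeout=timeout, allow_redirects=True)
        return r.status_code == 200
    except requests.RequestException:
        return False


print(resolves_via_head("https://www.wikidata.org/wiki/Q42"))  # Q42 is a well-known example item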
@@ -773,7 +797,7 @@ def get_rdf_metadata_format(oai_base):
 def licenses_list():
     url = "https://spdx.org/licenses/licenses.json"
     headers = {"Accept": "application/json"}  # Type of response accpeted
-    r = requests.get(url, headers=headers)  # GET with headers
+    r = requests.get(url, verify=False, headers=headers)  # GET with headers
     output = r.json()
     licenses = []
     for e in output["licenses"]:
@@ -784,7 +808,7 @@ def is_spdx_license(license_id, machine_readable=False):
 def is_spdx_license(license_id, machine_readable=False):
     url = "https://spdx.org/licenses/licenses.json"
     headers = {"Accept": "application/json"}  # Type of response accpeted
-    r = requests.get(url, headers=headers)  # GET with headers
+    r = requests.get(url, verify=False, headers=headers)  # GET with headers
     payload = r.json()
     is_spdx = False
     license_list = []
@@ -819,19 +843,22 @@ def resolve_handle(handle_id):

     Returns:
     """
-    resolves = False
-    endpoint = urljoin("https://hdl.handle.net/api/", "handles/%s" % handle_id)
+    handle_id_normalized = idutils.normalize_doi(handle_id)
+    endpoint = urljoin(
+        "https://hdl.handle.net/api/", "handles/%s" % handle_id_normalized
+    )
     headers = {"Content-Type": "application/json"}
-    r = requests.get(endpoint, headers=headers)
+    r = requests.get(endpoint, verify=False, headers=headers)
     if not r.ok:
         msg = "Error while making a request to endpoint: %s (status code: %s)" % (
             endpoint,
             r.status_code,
         )
         raise Exception(msg)
-
     json_data = r.json()
     response_code = json_data.get("responseCode", -1)
+
+    resolves = False
     if response_code == 1:
         resolves = True
         msg = "Handle and associated values found (HTTP 200 OK)"
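
resolve_handle now normalizes the identifier before querying the Handle System REST API, which signals a successful resolution through responseCode == 1 in the JSON payload. A minimal sketch of that call without the idutils normalization step; the sample handle is illustrative:

import requests


def handle_resolves(handle_id):
    # Return True if hdl.handle.net reports responseCode == 1 for the handle (sketch).
    endpoint = "https://hdl.handle.net/api/handles/%s" % handle_id
    r = requests.get(endpoint, headers={"Content-Type": "application/json"}, timeout=10)
    if not r.ok:
        raise RuntimeError("Handle API request failed with status %s" % r.status_code)
    return r.json().get("responseCode", -1) == 1


print(handle_resolves("10.1000/182"))  # example DOI-style handle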
@@ -963,3 +990,15 @@ def check_fairsharing_abbreviation(fairlist, abreviation):
         if abreviation == standard["attributes"]["abbreviation"]:
             return (100, "Your metadata standard appears in Fairsharing")
     return (0, "Your metadata standard has not been found in Fairsharing")
+
+
+def check_ror(ror):
+    response = requests.get("https://api.ror.org/organizations/" + ror)
+
+    rordict = response.json()
+    name = rordict["name"]
+
+    if response.ok:
+        return (True, name)
+    else:
+        return (False, "")
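
The new check_ror queries the ROR organizations API and returns the organization name on success. Note that the committed version reads the JSON body and its "name" field before checking response.ok, so a failed lookup raises before reaching the else branch; below is a hedged variant that guards the status first, plus a usage line (the ROR identifier is only an example and this is not the committed behaviour):

import requests


def check_ror_guarded(ror):
    # Look up a ROR identifier and return (found, organization_name) (sketch).
    response = requests.get("https://api.ror.org/organizations/" + ror, timeout=10)
    if not response.ok:
        return (False, "")
    return (True, response.json().get("name", ""))


found, name = check_ror_guarded("01ggx4157")  # example ROR ID
print(found, name)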
