Commit 031d876

clean up environment variable
1 parent 2179522 commit 031d876

Showing 4 changed files with 22 additions and 22 deletions.

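Across these files the commit renames the environment variable from lowercase clowder_version to uppercase CLOWDER_VERSION and parses it as an integer instead of a float. A minimal sketch of the new lookup, mirroring the changed lines below (the surrounding script is illustrative only, not part of the commit):

    import os

    # New convention in this commit: uppercase variable name, integer value.
    # An unset variable falls back to '1' (Clowder v1), matching the diff's default.
    clowder_version = int(os.getenv('CLOWDER_VERSION', '1'))

    if clowder_version == 2:
        print("using the Clowder v2 code path (v2datasets / v2files)")
    else:
        print("using the Clowder v1 code path (v1datasets / v1files)")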

pyclowder/connectors.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -240,7 +240,7 @@ def _build_resource(self, body, host, secret_key, clowder_version):
240240

241241
elif resource_type == "file":
242242
ext = os.path.splitext(filename)[1]
243-
if clowder_version == 2.0:
243+
if clowder_version == 2:
244244
return {
245245
"type": "file",
246246
"id": fileid,
@@ -405,14 +405,14 @@ def _process_message(self, body):
405405
if not host.endswith('/'): host += '/'
406406
secret_key = body.get('secretKey', '')
407407
retry_count = 0 if 'retry_count' not in body else body['retry_count']
408-
clowder_version = float(body.get('clowderVersion', os.getenv('clowder_version', '1.0')))
408+
clowder_version = int(body.get('clowderVersion', os.getenv('CLOWDER_VERSION', '1')))
409409
resource = self._build_resource(body, host, secret_key, clowder_version)
410410
if not resource:
411411
logging.error("No resource found, this is bad.")
412412
return
413413

414414
# register extractor
415-
if clowder_version != 2.0:
415+
if clowder_version != 2:
416416
url = "%sapi/extractors" % source_host
417417
if url not in Connector.registered_clowder:
418418
Connector.registered_clowder.append(url)
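In _process_message the message body's clowderVersion takes precedence over the environment variable. A small sketch of that precedence; resolve_clowder_version is a hypothetical helper name, the real code inlines the expression:

    import os

    def resolve_clowder_version(body):
        # Message body wins; otherwise CLOWDER_VERSION from the environment; otherwise 1.
        return int(body.get('clowderVersion', os.getenv('CLOWDER_VERSION', '1')))

    os.environ['CLOWDER_VERSION'] = '2'
    print(resolve_clowder_version({}))                     # 2, taken from the environment
    print(resolve_clowder_version({'clowderVersion': 1}))  # 1, message overrides the environment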

pyclowder/datasets.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
1616
from pyclowder.utils import StatusMessage
1717

18-
clowder_version = float(os.getenv('clowder_version', '1.0'))
18+
clowder_version = int(os.getenv('CLOWDER_VERSION', '1'))
1919

2020
def create_empty(connector, host, key, datasetname, description, parentid=None, spaceid=None):
2121
"""Create a new dataset in Clowder.
@@ -29,7 +29,7 @@ def create_empty(connector, host, key, datasetname, description, parentid=None,
2929
parentid -- id of parent collection
3030
spaceid -- id of the space to add dataset to
3131
"""
32-
if clowder_version >= 2.0:
32+
if clowder_version == 2:
3333
datasetid = v2datasets.create_empty(connector, host, key, datasetname, description, parentid, spaceid)
3434
else:
3535
datasetid = v1datasets.create_empty(connector, host, key, datasetname, description, parentid, spaceid)
@@ -45,7 +45,7 @@ def delete(connector, host, key, datasetid):
4545
key -- the secret key to login to clowder
4646
datasetid -- the dataset to delete
4747
"""
48-
if clowder_version >= 2.0:
48+
if clowder_version == 2:
4949
result = v2datasets.delete(connector, host, key, datasetid)
5050
else:
5151
result = v2datasets.delete(connector, host, key, datasetid)
@@ -87,7 +87,7 @@ def download(connector, host, key, datasetid):
8787
key -- the secret key to login to clowder
8888
datasetid -- the file that is currently being processed
8989
"""
90-
if clowder_version >= 2.0:
90+
if clowder_version == 2:
9191
zipfile = v2datasets.download(connector, host, key, datasetid)
9292
else:
9393
zipfile = v1datasets.download(connector, host, key, datasetid)
@@ -104,7 +104,7 @@ def download_metadata(connector, host, key, datasetid, extractor=None):
104104
datasetid -- the dataset to fetch metadata of
105105
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
106106
"""
107-
if clowder_version >= 2.0:
107+
if clowder_version == 2:
108108
result_json = v2datasets.download_metadata(connector, host, key, datasetid, extractor)
109109
return result_json
110110
else:
@@ -121,7 +121,7 @@ def get_info(connector, host, key, datasetid):
121121
key -- the secret key to login to clowder
122122
datasetid -- the dataset to get info of
123123
"""
124-
if clowder_version >= 2.0:
124+
if clowder_version == 2:
125125
info = v2datasets.get_info(connector, host, key, datasetid)
126126
else:
127127
info = v1datasets.get_info(connector, host, key, datasetid)
@@ -137,7 +137,7 @@ def get_file_list(connector, host, key, datasetid):
137137
key -- the secret key to login to clowder
138138
datasetid -- the dataset to get filelist of
139139
"""
140-
if clowder_version >= 2.0:
140+
if clowder_version == 2:
141141
file_list = v2datasets.get_file_list(connector, host, key, datasetid)
142142
else:
143143
file_list = v1datasets.get_file_list(connector, host, key, datasetid)
@@ -155,7 +155,7 @@ def remove_metadata(connector, host, key, datasetid, extractor=None):
155155
extractor -- extractor name to filter deletion
156156
!!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
157157
"""
158-
if clowder_version >= 2.0:
158+
if clowder_version == 2:
159159
v2datasets.remove_metadata(connector, host, key, datasetid, extractor)
160160
else:
161161
v1datasets.remove_metadata(connector, host, key, datasetid, extractor)
@@ -171,7 +171,7 @@ def submit_extraction(connector, host, key, datasetid, extractorname):
171171
datasetid -- the dataset UUID to submit
172172
extractorname -- registered name of extractor to trigger
173173
"""
174-
if clowder_version >= 2.0:
174+
if clowder_version == 2:
175175
result_status_code = v2datasets.submit_extraction(connector, host, key, datasetid, extractorname)
176176
else:
177177
result_status_code = v1datasets.submit_extraction(connector, host, key, datasetid, extractorname)
@@ -232,7 +232,7 @@ def upload_metadata(connector, host, key, datasetid, metadata):
232232
datasetid -- the dataset that is currently being processed
233233
metadata -- the metadata to be uploaded
234234
"""
235-
if clowder_version >= 2.0:
235+
if clowder_version == 2:
236236
v2datasets.upload_metadata(connector, host, key, datasetid, metadata)
237237
else:
238238
v1datasets.upload_metadata(connector, host, key, datasetid, metadata)

pyclowder/extractors.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626

2727
from dotenv import load_dotenv
2828
load_dotenv()
29-
clowder_version = float(os.getenv('clowder_version', '1.0'))
29+
clowder_version = float(os.getenv('CLOWDER_VERSION', '1'))
3030

3131

3232
class Extractor(object):
@@ -266,7 +266,7 @@ def get_metadata(self, content, resource_type, resource_id, server=None):
266266
if not self._check_key(k, self.extractor_info['contexts']):
267267
logger.debug("Simple check could not find %s in contexts" % k)
268268
# TODO generate clowder2.0 extractor info
269-
if clowder_version >= 2.0:
269+
if clowder_version == 2:
270270
new_extractor_info = self._get_extractor_info_v2()
271271
md = dict()
272272
md["file_version"] = 1

pyclowder/files.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ def download(connector, host, key, fileid, intermediatefileid=None, ext=""):
4141
intermediatefileid -- either same as fileid, or the intermediate file to be used
4242
ext -- the file extension, the downloaded file will end with this extension
4343
"""
44-
if clowder_version >= 2.0:
44+
if clowder_version == 2:
4545
inputfilename = v2files.download(connector, host, key, fileid, intermediatefileid, ext)
4646
else:
4747
inputfilename = v1files.download(connector, host, key, fileid, intermediatefileid, ext)
@@ -58,7 +58,7 @@ def download_info(connector, host, key, fileid):
5858
fileid -- the file to fetch metadata of
5959
"""
6060

61-
if clowder_version >= 2.0:
61+
if clowder_version == 2:
6262
result = v2files.download_info(connector, host, key, fileid)
6363
else:
6464
result = v1files.download_info(connector, host, key, fileid)
@@ -75,7 +75,7 @@ def download_metadata(connector, host, key, fileid, extractor=None):
7575
fileid -- the file to fetch metadata of
7676
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
7777
"""
78-
if clowder_version >= 2.0:
78+
if clowder_version == 2:
7979
result = v2files.download_metadata(connector, host, key, fileid, extractor)
8080
else:
8181
result = v1files.download_metadata(connector, host, key, fileid, extractor)
@@ -92,7 +92,7 @@ def submit_extraction(connector, host, key, fileid, extractorname):
9292
fileid -- the file UUID to submit
9393
extractorname -- registered name of extractor to trigger
9494
"""
95-
if clowder_version >= 2.0:
95+
if clowder_version == 2:
9696
result = v2files.submit_extraction(connector, host, key, fileid, extractorname)
9797
else:
9898
result = v1files.submit_extraction(connector, host, key, fileid, extractorname)
@@ -162,7 +162,7 @@ def upload_metadata(connector, host, key, fileid, metadata):
162162
metadata -- the metadata to be uploaded
163163
"""
164164

165-
if clowder_version >= 2.0:
165+
if clowder_version == 2:
166166
v2files.upload_metadata(connector, host, key, fileid, metadata)
167167
else:
168168
v1files.upload_metadata(connector, host, key, fileid, metadata)
@@ -277,7 +277,7 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
277277
check_duplicate -- check if filename already exists in dataset and skip upload if so
278278
"""
279279

280-
if clowder_version >= 2.0:
280+
if clowder_version == 2:
281281
v2files.upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate)
282282
else:
283283
logger = logging.getLogger(__name__)
@@ -322,7 +322,7 @@ def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
322322
filepath -- path to file
323323
"""
324324

325-
if clowder_version >= 2.0:
325+
if clowder_version == 2:
326326
uploadedfileid = v2files._upload_to_dataset_local(connector, host, key, datasetid, filepath)
327327
else:
328328
uploadedfileid = v1files._upload_to_dataset_local(connector, host, key, datasetid, filepath)
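The wrapper signatures in files.py are unchanged, so extractor code keeps calling them the same way and the version check happens inside the wrappers. A hedged usage sketch based only on the signatures visible in this diff; the connector argument would normally be the Connector instance handed to an extractor, and the assumption that files.py reads the variable at import time mirrors the module-level assignment shown for datasets.py:

    import os

    # Set the variable before importing the wrappers, since the module-level
    # clowder_version is read once at import time. '2' selects the v2 code path.
    os.environ.setdefault('CLOWDER_VERSION', '2')

    import pyclowder.files


    def fetch_file_and_metadata(connector, host, key, fileid):
        # Same calls for Clowder v1 and v2; dispatch to v1files/v2files happens inside.
        path = pyclowder.files.download(connector, host, key, fileid)
        metadata = pyclowder.files.download_metadata(connector, host, key, fileid)
        return path, metadata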
