Skip to content

Commit 97470eb

Browse files
committed
calling v2 files if that is the version
1 parent f98a5dc commit 97470eb

File tree

1 file changed

+85
-101
lines changed

1 file changed

+85
-101
lines changed

pyclowder/files.py

Lines changed: 85 additions & 101 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
from pyclowder.datasets import get_file_list
1616
from pyclowder.collections import get_datasets, get_child_collections
17+
import api.v2.files as v2files
1718

1819
from dotenv import load_dotenv
1920
load_dotenv()
@@ -40,31 +41,16 @@ def download(connector, host, key, fileid, intermediatefileid=None, ext="", toke
4041
intermediatefileid -- either same as fileid, or the intermediate file to be used
4142
ext -- the file extension, the downloaded file will end with this extension
4243
"""
43-
44-
connector.message_process({"type": "file", "id": fileid}, "Downloading file.")
45-
46-
47-
48-
# TODO: intermediateid doesn't really seem to be used here, can we remove entirely?
49-
if not intermediatefileid:
50-
intermediatefileid = fileid
51-
5244
if clowder_version >= 2.0:
53-
url = '%sapi/v2/files/%s' % (host, intermediatefileid)
54-
headers = {"Authorization": "Bearer " + token}
55-
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
45+
return v2files.download(connector, host, key, fileid, intermediatefileid, ext, token)
46+
else:
47+
connector.message_process({"type": "file", "id": fileid}, "Downloading file.")
5648

57-
(inputfile, inputfilename) = tempfile.mkstemp(suffix=ext)
5849

59-
try:
60-
with os.fdopen(inputfile, "wb") as outputfile:
61-
for chunk in result.iter_content(chunk_size=10 * 1024):
62-
outputfile.write(chunk)
63-
return inputfilename
64-
except Exception:
65-
os.remove(inputfilename)
66-
raise
67-
else:
50+
51+
# TODO: intermediateid doesn't really seem to be used here, can we remove entirely?
52+
if not intermediatefileid:
53+
intermediatefileid = fileid
6854
url = '%sapi/files/%s?key=%s' % (host, intermediatefileid, key)
6955
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
7056

@@ -91,12 +77,7 @@ def download_info(connector, host, key, fileid, token=None):
9177
"""
9278

9379
if clowder_version >= 2.0:
94-
url = '%sapi/v2/files/%s/metadata' % (host, fileid)
95-
headers = {"Authorization": "Bearer " + token}
96-
# fetch data
97-
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True, headers=headers)
98-
99-
return result.json()
80+
return v2files.download_info(connector, host, key, fileid, token)
10081
else:
10182
url = '%sapi/files/%s/metadata?key=%s' % (host, fileid, key)
10283
headers = {"Authorization": "Bearer " + token}
@@ -107,7 +88,7 @@ def download_info(connector, host, key, fileid, token=None):
10788
return result.json()
10889

10990

110-
def download_metadata(connector, host, key, fileid, extractor=None):
91+
def download_metadata(connector, host, key, fileid, extractor=None, token=None):
11192
"""Download file JSON-LD metadata from Clowder.
11293
11394
Keyword arguments:
@@ -117,17 +98,19 @@ def download_metadata(connector, host, key, fileid, extractor=None):
11798
fileid -- the file to fetch metadata of
11899
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
119100
"""
101+
if clowder_version >= 2.0:
102+
return v2files.download_metadata(connector, host, key, fileid, extractor, token)
103+
else:
104+
filterstring = "" if extractor is None else "&extractor=%s" % extractor
105+
url = '%sapi/files/%s/metadata.jsonld?key=%s%s' % (host, fileid, key, filterstring)
120106

121-
filterstring = "" if extractor is None else "&extractor=%s" % extractor
122-
url = '%sapi/files/%s/metadata.jsonld?key=%s%s' % (host, fileid, key, filterstring)
123-
124-
# fetch data
125-
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
107+
# fetch data
108+
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
126109

127-
return result.json()
110+
return result.json()
128111

129112

130-
def submit_extraction(connector, host, key, fileid, extractorname):
113+
def submit_extraction(connector, host, key, fileid, extractorname, token=None):
131114
"""Submit file for extraction by given extractor.
132115
133116
Keyword arguments:
@@ -137,15 +120,17 @@ def submit_extraction(connector, host, key, fileid, extractorname):
137120
fileid -- the file UUID to submit
138121
extractorname -- registered name of extractor to trigger
139122
"""
123+
if clowder_version >= 2.0:
124+
return v2files.submit_extraction(connector, host, key, fileid, extractorname, token)
125+
else:
126+
url = "%sapi/files/%s/extractions?key=%s" % (host, fileid, key)
140127

141-
url = "%sapi/files/%s/extractions?key=%s" % (host, fileid, key)
142-
143-
result = connector.post(url,
144-
headers={'Content-Type': 'application/json'},
145-
data=json.dumps({"extractor": extractorname}),
146-
verify=connector.ssl_verify if connector else True)
128+
result = connector.post(url,
129+
headers={'Content-Type': 'application/json'},
130+
data=json.dumps({"extractor": extractorname}),
131+
verify=connector.ssl_verify if connector else True)
147132

148-
return result.json()
133+
return result.json()
149134

150135

151136
def submit_extractions_by_dataset(connector, host, key, datasetid, extractorname, ext=False):
@@ -212,14 +197,7 @@ def upload_metadata(connector, host, key, fileid, metadata):
212197
"""
213198

214199
if clowder_version >= 2.0:
215-
connector.message_process({"type": "file", "id": fileid}, "Uploading file metadata.")
216-
217-
headers = {'Content-Type': 'application/json',
218-
'Authorization':'Bearer ' + key}
219-
print(metadata)
220-
url = '%sapi/v2/files/%s/metadata' % (host, fileid)
221-
result = connector.post(url, headers=headers, data=json.dumps(metadata),
222-
verify=connector.ssl_verify if connector else True)
200+
v2files.upload_metadata(connector, host, key, fileid, metadata)
223201
else:
224202
connector.message_process({"type": "file", "id": fileid}, "Uploading file metadata.")
225203

@@ -327,7 +305,7 @@ def upload_thumbnail(connector, host, key, fileid, thumbnail):
327305
return thumbnailid
328306

329307

330-
def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate=False):
308+
def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate=False, token=None):
331309
"""Upload file to existing Clowder dataset.
332310
333311
Keyword arguments:
@@ -339,38 +317,41 @@ def upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate
339317
check_duplicate -- check if filename already exists in dataset and skip upload if so
340318
"""
341319

342-
logger = logging.getLogger(__name__)
320+
if clowder_version >= 2.0:
321+
return v2files.upload_to_dataset(connector, host, key, datasetid, filepath, check_duplicate, token)
322+
else:
323+
logger = logging.getLogger(__name__)
343324

344-
if check_duplicate:
345-
ds_files = get_file_list(connector, host, key, datasetid)
346-
for f in ds_files:
347-
if f['filename'] == os.path.basename(filepath):
348-
logger.debug("found %s in dataset %s; not re-uploading" % (f['filename'], datasetid))
349-
return None
350-
351-
for source_path in connector.mounted_paths:
352-
if filepath.startswith(connector.mounted_paths[source_path]):
353-
return _upload_to_dataset_local(connector, host, key, datasetid, filepath)
354-
355-
url = '%sapi/uploadToDataset/%s?key=%s' % (host, datasetid, key)
356-
357-
if os.path.exists(filepath):
358-
filename = os.path.basename(filepath)
359-
m = MultipartEncoder(
360-
fields={'file': (filename, open(filepath, 'rb'))}
361-
)
362-
result = connector.post(url, data=m, headers={'Content-Type': m.content_type},
363-
verify=connector.ssl_verify if connector else True)
325+
if check_duplicate:
326+
ds_files = get_file_list(connector, host, key, datasetid)
327+
for f in ds_files:
328+
if f['filename'] == os.path.basename(filepath):
329+
logger.debug("found %s in dataset %s; not re-uploading" % (f['filename'], datasetid))
330+
return None
364331

365-
uploadedfileid = result.json()['id']
366-
logger.debug("uploaded file id = [%s]", uploadedfileid)
332+
for source_path in connector.mounted_paths:
333+
if filepath.startswith(connector.mounted_paths[source_path]):
334+
return _upload_to_dataset_local(connector, host, key, datasetid, filepath)
367335

368-
return uploadedfileid
369-
else:
370-
logger.error("unable to upload file %s (not found)", filepath)
336+
url = '%sapi/uploadToDataset/%s?key=%s' % (host, datasetid, key)
371337

338+
if os.path.exists(filepath):
339+
filename = os.path.basename(filepath)
340+
m = MultipartEncoder(
341+
fields={'file': (filename, open(filepath, 'rb'))}
342+
)
343+
result = connector.post(url, data=m, headers={'Content-Type': m.content_type},
344+
verify=connector.ssl_verify if connector else True)
372345

373-
def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
346+
uploadedfileid = result.json()['id']
347+
logger.debug("uploaded file id = [%s]", uploadedfileid)
348+
349+
return uploadedfileid
350+
else:
351+
logger.error("unable to upload file %s (not found)", filepath)
352+
353+
354+
def _upload_to_dataset_local(connector, host, key, datasetid, filepath, token=None):
374355
"""Upload file POINTER to existing Clowder dataset. Does not copy actual file bytes.
375356
376357
Keyword arguments:
@@ -381,27 +362,30 @@ def _upload_to_dataset_local(connector, host, key, datasetid, filepath):
381362
filepath -- path to file
382363
"""
383364

384-
logger = logging.getLogger(__name__)
385-
url = '%sapi/uploadToDataset/%s?key=%s' % (host, datasetid, key)
386-
387-
if os.path.exists(filepath):
388-
# Replace local path with remote path before uploading
389-
for source_path in connector.mounted_paths:
390-
if filepath.startswith(connector.mounted_paths[source_path]):
391-
filepath = filepath.replace(connector.mounted_paths[source_path],
392-
source_path)
393-
break
394-
395-
filename = os.path.basename(filepath)
396-
m = MultipartEncoder(
397-
fields={'file': (filename, open(filepath, 'rb'))}
398-
)
399-
result = connector.post(url, data=m, headers={'Content-Type': m.content_type},
400-
verify=connector.ssl_verify if connector else True)
365+
if clowder_version >= 2.0:
366+
return v2files._upload_to_dataset_local(connector, host, key, datasetid, filepath, token)
367+
else:
368+
logger = logging.getLogger(__name__)
369+
url = '%sapi/uploadToDataset/%s?key=%s' % (host, datasetid, key)
370+
371+
if os.path.exists(filepath):
372+
# Replace local path with remote path before uploading
373+
for source_path in connector.mounted_paths:
374+
if filepath.startswith(connector.mounted_paths[source_path]):
375+
filepath = filepath.replace(connector.mounted_paths[source_path],
376+
source_path)
377+
break
378+
379+
filename = os.path.basename(filepath)
380+
m = MultipartEncoder(
381+
fields={'file': (filename, open(filepath, 'rb'))}
382+
)
383+
result = connector.post(url, data=m, headers={'Content-Type': m.content_type},
384+
verify=connector.ssl_verify if connector else True)
401385

402-
uploadedfileid = result.json()['id']
403-
logger.debug("uploaded file id = [%s]", uploadedfileid)
386+
uploadedfileid = result.json()['id']
387+
logger.debug("uploaded file id = [%s]", uploadedfileid)
404388

405-
return uploadedfileid
406-
else:
407-
logger.error("unable to upload local file %s (not found)", filepath)
389+
return uploadedfileid
390+
else:
391+
logger.error("unable to upload local file %s (not found)", filepath)

0 commit comments

Comments
 (0)