Skip to content

Commit 637bd4d

Browse files
committed
Fix: Make prodinfo check failure not an error
1 parent 26f219b commit 637bd4d

File tree

3 files changed: +29 additions, −24 deletions

charon/pkgs/npm.py

Lines changed: 13 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -113,12 +113,12 @@ def handle_npm_uploading(
113113
succeeded = True
114114
for target in targets:
115115
manifest_folder = target[0]
116-
logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
117116
if not manifest_bucket_name:
118117
logger.warning(
119118
'Warning: No manifest bucket is provided, will ignore the process of manifest '
120119
'uploading\n')
121120
else:
121+
logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
122122
manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)
123123
client.upload_manifest(
124124
manifest_name, manifest_full_path,
@@ -216,13 +216,18 @@ def handle_npm_del(
216216
)
217217
logger.info("Files deletion done\n")
218218

219-
manifest_folder = target[0]
220-
logger.info(
221-
"Start deleting manifest from s3 bucket %s",
222-
manifest_bucket_name
223-
)
224-
client.delete_manifest(product, manifest_folder, manifest_bucket_name)
225-
logger.info("Manifest deletion is done\n")
219+
if manifest_bucket_name:
220+
manifest_folder = target[0]
221+
logger.info(
222+
"Start deleting manifest from s3 bucket %s in folder %s",
223+
manifest_bucket_name, manifest_folder
224+
)
225+
client.delete_manifest(product, manifest_folder, manifest_bucket_name)
226+
logger.info("Manifest deletion is done\n")
227+
else:
228+
logger.warning(
229+
'Warning: No manifest bucket is provided, will ignore the process of manifest '
230+
'deletion\n')
226231

227232
logger.info(
228233
"Start generating package.json for package: %s in bucket %s",

charon/storage.py

Lines changed: 9 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -203,12 +203,10 @@ async def path_upload_handler(
203203
failed.append(full_file_path)
204204
return
205205
else:
206-
succeeded = await handle_existed(
206+
await handle_existed(
207207
full_file_path, sha1, main_path_key,
208-
main_bucket_name, main_file_object, failed
208+
main_bucket_name, main_file_object
209209
)
210-
if not succeeded:
211-
return
212210

213211
# do copy:
214212
for target_ in extra_prefixed_buckets:
@@ -241,12 +239,12 @@ async def path_upload_handler(
241239
else:
242240
await handle_existed(
243241
full_file_path, sha1, extra_path_key,
244-
extra_bucket_name, file_object, failed
242+
extra_bucket_name, file_object
245243
)
246244

247245
async def handle_existed(
248246
file_path, file_sha1, path_key,
249-
bucket_name, file_object, failed_paths
247+
bucket_name, file_object
250248
) -> bool:
251249
logger.debug(
252250
"File %s already exists in bucket %s, check if need to update product.",
@@ -257,10 +255,9 @@ async def handle_existed(
257255
f_meta[CHECKSUM_META_KEY] if CHECKSUM_META_KEY in f_meta else ""
258256
)
259257
if checksum != "" and checksum.strip() != file_sha1:
260-
logger.warning('Error: checksum check failed. The file %s is '
258+
logger.warning('Warning: checksum check failed. The file %s is '
261259
'different from the one in S3 bucket %s. Product: %s',
262260
path_key, bucket_name, product)
263-
failed_paths.append(file_path)
264261
return False
265262
(prods, no_error) = await self.__run_async(
266263
self.__get_prod_info,
@@ -277,7 +274,6 @@ async def handle_existed(
277274
path_key, bucket_name, prods
278275
)
279276
if not result:
280-
failed_paths.append(file_path)
281277
return False
282278
return True
283279

@@ -703,8 +699,8 @@ async def __update_prod_info(
703699
logger.debug("Updated product infomation for file %s", file)
704700
return True
705701
except (ClientError, HTTPClientError) as e:
706-
logger.error("ERROR: Can not update product info for file %s "
707-
"due to error: %s", file, e)
702+
logger.warning("WARNING: Can not update product info for file %s "
703+
"due to error: %s", file, e)
708704
return False
709705
else:
710706
logger.debug("Removing product infomation file for file %s "
@@ -724,8 +720,8 @@ async def __update_prod_info(
724720
logger.debug("Removed product infomation file for file %s", file)
725721
return True
726722
except (ClientError, HTTPClientError) as e:
727-
logger.error("ERROR: Can not delete product info file for file %s "
728-
"due to error: %s", file, e)
723+
logger.warning("WARNING: Can not delete product info file for file %s "
724+
"due to error: %s", file, e)
729725
return False
730726

731727
def __path_handler_count_wrapper(

tests/test_s3client.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
limitations under the License.
1515
"""
1616
from typing import List
17-
1817
from boto3_type_annotations import s3
1918
from charon.storage import S3Client, CHECKSUM_META_KEY
2019
from charon.utils.archive import extract_zip_all
@@ -375,16 +374,21 @@ def test_exists_override_failing(self):
375374
product="apache-commons", root=temp_root
376375
)
377376
self.assertEqual(0, len(failed_paths))
377+
sha1 = read_sha1(all_files[0])
378+
path = all_files[0][len(temp_root) + 1:]
378379

379380
# Change content to make hash changes
380381
with open(all_files[0], "w+", encoding="utf-8") as f:
381382
f.write("changed content")
383+
sha1_changed = read_sha1(all_files[0])
384+
self.assertNotEqual(sha1, sha1_changed)
382385
failed_paths = self.s3_client.upload_files(
383386
all_files, targets=[(MY_BUCKET, None)],
384387
product="apache-commons-2", root=temp_root
385388
)
386-
self.assertEqual(1, len(failed_paths))
387-
self.assertIn(failed_paths[0], all_files[0])
389+
bucket = self.mock_s3.Bucket(MY_BUCKET)
390+
file_obj = bucket.Object(path)
391+
self.assertEqual(sha1, file_obj.metadata[CHECKSUM_META_KEY])
388392

389393
def __prepare_files(self):
390394
test_zip = zipfile.ZipFile(

Comments (0 commit comments)