diff --git a/src/DPR/DPR_processor_mock.py b/src/DPR/DPR_processor_mock.py index b8c208a..a16de60 100644 --- a/src/DPR/DPR_processor_mock.py +++ b/src/DPR/DPR_processor_mock.py @@ -164,6 +164,19 @@ def update_product(self, path: pathlib.Path, ptype): #data['stac_discovery']['id'] = new_product_id self.meta_attrs.append(data) + def unzip_if_needed(self, path: pathlib.Path) -> pathlib.Path: + # Check if the file has a .zip extension + if path.suffix.lower() == ".zip" and path.is_file(): + extract_dir = path.parent / path.stem # create folder with same name + extract_dir.mkdir(exist_ok=True) + + # Unzip the file + with zipfile.ZipFile(path, 'r') as zip_ref: + zip_ref.extractall(extract_dir) + + return extract_dir + return path + def upload_to_s3(self, path: pathlib.Path, ptype): """To be added. Should update products to a given s3 storage.""" bucket_path = [out['path'] for out in self.payload_data["I/O"]["output_products"] if ptype == out['id']][0].split("/") @@ -173,6 +186,8 @@ def upload_to_s3(self, path: pathlib.Path, ptype): bucket_path[2], "/".join(bucket_path[3:]), ) + path = self.unzip_if_needed(path) + s3_config.files = [path] logger.info("S3 config: %s %s %s", [str(path.absolute().resolve())], bucket_path[2], "/".join(bucket_path[3:])) handler = S3StorageHandler( os.environ["S3_ACCESSKEY"], @@ -266,7 +281,7 @@ def update_product_name(self, path: pathlib.Path, crc: str): if __name__ == "__main__": parser = argparse.ArgumentParser(description="Starts the DPR processor mockup") - default_payload_file = "src/DPR/payload.yaml" + default_payload_file = "payload.yaml" parser.add_argument( "-p", "--payload", diff --git a/src/DPR/common/s3_handler.py b/src/DPR/common/s3_handler.py index d9750ac..bbc9ced 100644 --- a/src/DPR/common/s3_handler.py +++ b/src/DPR/common/s3_handler.py @@ -238,7 +238,7 @@ def files_to_be_uploaded(self, paths): """ list_with_files = [] for local in paths: - path = local.strip() + path = str(local).strip() # check if it is a file 
self.logger.debug("path = %s", path) if os.path.isfile(path): @@ -493,7 +493,6 @@ def put_files_to_s3(self, config: Any) -> list: self.logger.debug("locals = %s", locals()) collection_files = self.files_to_be_uploaded(config.files) - try: self.check_bucket_access(config.bucket) except RuntimeError: