@@ -72,12 +72,12 @@ def setup_dask_cluster(enable_dask: bool, verbose: bool = False) -> Any | None:
         return client

     except ImportError:
-        print(
-            "❌ Error: dask.distributed not available. Install with: pip install 'dask[distributed]'"
+        log.error(
+            "dask.distributed not available. Install with: pip install 'dask[distributed]'"
         )
         sys.exit(1)
     except Exception as e:
-        log.info("❌ Error starting dask cluster", error=str(e))
+        log.error("Error starting dask cluster", error=str(e))
         sys.exit(1)


@@ -116,26 +116,21 @@ def convert_command(args: argparse.Namespace) -> None:
     log.info("🔍 Validating S3 access...")
     success, error_msg = validate_s3_access(output_path_str)
     if not success:
-        log.info("❌ Error: Cannot access S3 path", path=output_path_str)
-        log.info(" Reason", error=error_msg)
-        log.info("\n💡 S3 Configuration Help:")
-        log.info(" Make sure you have S3 credentials configured:")
-        print(
+        msg = (
+            f"❌ Error: Cannot access S3 path {output_path_str} "
+            f"Reason: {error_msg} "
+            "💡 S3 Configuration Help:"
+            " Make sure you have S3 credentials configured:"
             " - Set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables"
-        )
-        log.info(" - Set AWS_DEFAULT_REGION (default: us-east-1)")
-        print(
+            " - Set AWS_DEFAULT_REGION (default: us-east-1)"
             " - For custom S3 providers (e.g., OVH Cloud), set AWS_ENDPOINT_URL"
+            " - Or configure AWS CLI with 'aws configure'"
+            " - Or use IAM roles if running on EC2"
         )
-        log.info(" - Or configure AWS CLI with 'aws configure'")
-        log.info(" - Or use IAM roles if running on EC2")
-
+        log.error(msg)
         if args.verbose:
             creds_info = get_s3_credentials_info()
-            log.info("\n🔧 Current AWS configuration:")
-            for key, value in creds_info.items():
-                log.info(" ", key=key, value=value or "Not set")
-
+            log.info(f"🔧 Current AWS configuration: {creds_info.items()}")
         sys.exit(1)

     log.info("✅ S3 access validated successfully")
@@ -147,13 +142,13 @@ def convert_command(args: argparse.Namespace) -> None:
     output_path = str(output_path)

     if args.verbose:
-        log.info("Loading EOPF dataset from", input_path=input_path)
-        log.info("Groups to convert", groups=args.groups)
-        log.info("CRS groups", crs_groups=args.crs_groups)
-        log.info("Output path", output_path=output_path)
-        log.info("Spatial chunk size", spatial_chunk=args.spatial_chunk)
-        log.info("Min dimension", min_dimension=args.min_dimension)
-        log.info("Tile width", tile_width=args.tile_width)
+        log.info(f"Loading EOPF dataset from {input_path}")
+        log.info(f"Groups to convert: {args.groups}")
+        log.info(f"CRS groups: {args.crs_groups}")
+        log.info(f"Output path: {output_path}")
+        log.info(f"Spatial chunk size: {args.spatial_chunk}")
+        log.info(f"Min dimension: {args.min_dimension}")
+        log.info(f"Tile width: {args.tile_width}")

     # Load the EOPF DataTree with appropriate storage options
     log.info("Loading EOPF dataset...")
@@ -166,10 +161,8 @@ def convert_command(args: argparse.Namespace) -> None:
     )

     if args.verbose:
-        log.info("Loaded DataTree with groups", group_count=len(dt.children))
-        log.info("Available groups:")
-        for group_name in dt.children:
-            log.info(" -", group_name=group_name)
+        log.info(f"Loaded DataTree with {len(dt.children)} groups")
+        log.info(f"Available groups: {tuple(dt.children.keys())}")

     # Convert to GeoZarr compliant format
     log.info("Converting to GeoZarr compliant format...")
@@ -192,13 +185,7 @@ def convert_command(args: argparse.Namespace) -> None:
     if args.verbose:
         # Check if dt_geozarr is a DataTree or Dataset
         if hasattr(dt_geozarr, "children"):
-            log.info(
-                "Converted DataTree has groups",
-                group_count=len(dt_geozarr.children),
-            )
-            log.info("Converted groups:")
-            for group_name in dt_geozarr.children:
-                log.info(" -", group_name=group_name)
+            log.info(f"Converted groups: {tuple(dt_geozarr.children.keys())}")
         else:
             log.info("Converted dataset (single group)")

@@ -262,7 +249,7 @@ def info_command(args: argparse.Namespace) -> None:
         log.info("Total groups", group_count=len(dt.children))

         log.info("\nGroup structure:")
-        print(dt)
+        log.info(str(dt))

     except Exception as e:
         log.info("❌ Error reading dataset", error=str(e))
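
Taken together, these changes converge on one pattern: user-facing failures go through the structured logger at error level with actionable context and then exit, rather than mixing bare print() calls with log.info. A minimal sketch of that pattern, assuming a structlog-style logger; the helper name require_dask_client and its reduced signature are illustrative only, not the functions in the diff above:

import sys

import structlog

log = structlog.get_logger()


def require_dask_client():
    """Return a Dask distributed client, or log a structured error and exit."""
    try:
        # Optional dependency: imported lazily so the CLI still works without it.
        from dask.distributed import Client

        return Client()
    except ImportError:
        # Severity is carried by the log level, not by emoji in the message.
        log.error(
            "dask.distributed not available. Install with: pip install 'dask[distributed]'"
        )
        sys.exit(1)
    except Exception as e:
        # The exception text travels as structured context instead of being formatted in.
        log.error("Error starting dask cluster", error=str(e))
        sys.exit(1)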