+# This file is part of the CoverageControl library
+#
+# Author: Saurav Agarwal
+# Repository: https://github.com/KumarRobotics/CoverageControl
+#
+# Copyright (c) 2024, Saurav Agarwal
+#
+# The CoverageControl library is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+#
+# The CoverageControl library is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# CoverageControl library. If not, see <https://www.gnu.org/licenses/>.
+
+# @file data_generation.py
+# This file contains the code to generate a dataset for learning
+# Prefer using simple_data_generation.py for generating a dataset
+#
+
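+# An example configuration file (TOML) for this script: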
+# DataDir = "~/CoverageControl_ws/src/CoverageControl/" # Absolute path to the root of the repository
+# EnvironmentConfig = "params/coverage_control_params.toml" # Relative to DataDir
+#
+# NumDataset = 1000
+#
+# # Number of steps to take before data is stored
+# # This helps in creating a more diverse dataset
+# EveryNumSteps = 5
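+# # e.g., with EveryNumSteps = 5, a sample is recorded every fifth simulation step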
+#
+# # The robots stop moving once the algorithm has converged
+# # Including some of these converged steps can help stabilize robot actions
+# ConvergedDataRatio = 0.25
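+# # e.g., ConvergedDataRatio = 0.25 means a quarter of the dataset is recorded after convergence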
+#
+# # Resizing of maps and sparsification of tensors are triggered after every TriggerPostProcessing samples
+# # This should be set based on the RAM available on the system
+# TriggerPostProcessing = 100
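+# # e.g., with NumDataset = 1000 and TriggerPostProcessing = 100, post-processing runs in 10 batches of 100 samples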
+#
+# CNNMapSize = 32
+# SaveAsSparseQ = true
+# NormalizeQ = true
+#
+# [DataSetSplit]
+# TrainRatio = 0.7
+# ValRatio = 0.2
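+# # The remaining fraction (here 1 - 0.7 - 0.2 = 0.1) becomes the test split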
+
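+# Example invocation (a sketch; assumes the configuration above is saved as data_params.toml
+# and that an optional run-specific sub-directory name is passed as the second argument):
+#
+#   python data_generation.py data_params.toml run0
+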
 import os
 import sys
 import torch
-import datetime
 import math
+import pathlib
+import datetime

-import pyCoverageControl as CoverageControl
-from pyCoverageControl import CoverageSystem
-from pyCoverageControl import LloydGlobalOnline as CoverageAlgorithm
-# from pyCoverageControl import OracleGlobalOffline as CoverageAlgorithm
-# from pyCoverageControl import LloydLocalSensorGlobalComm as CoverageAlgorithm
-import CoverageControlTorch as cct
-import CoverageControlTorch.data_loaders.data_loader_utils as dl_utils
-from CoverageControlTorch.utils.coverage_system import ToTensor
-import CoverageControlTorch.utils.coverage_system as CoverageSystemUtils
-
+import coverage_control
+from coverage_control import IOUtils
+from coverage_control import CoverageSystem
+from coverage_control.algorithms import ClairvoyantCVT as CoverageAlgorithm
+from coverage_control.nn import CoverageEnvUtils

 class DatasetGenerator():
-    def __init__(self, config_file):
+    def __init__(self, config_file, append_dir=None):

-        # Load configs and create directories
-        self.config = dl_utils.LoadToml(config_file)
-        self.data_dir = self.config['DataDir']
-        self.data_folder = self.data_dir + '/data/'
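+        # All tensors are written under <DataDir>/data/, optionally inside an append_dir sub-directory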
+        self.config = IOUtils.load_toml(config_file)
+        self.data_dir = IOUtils.sanitize_path(self.config['DataDir'])
+        self.dataset_dir = self.data_dir + '/data/'
+        if append_dir is not None:
+            self.dataset_dir += append_dir

-        if not os.path.exists(self.data_dir):
-            throw("Data directory does not exist")
+        if not pathlib.Path(self.data_dir).exists():
+            print(f'{self.data_dir} does not exist')
+            exit()

-        if not os.path.exists(self.data_folder):
-            os.makedirs(self.data_folder)
+        self.dataset_dir_path = pathlib.Path(self.dataset_dir)
+        if not self.dataset_dir_path.exists():
+            os.makedirs(self.dataset_dir)

-        env_config_file = self.data_dir + self.config["EnvironmentConfig"]
-        if not os.path.exists(env_config_file):
-            throw("Environment config file does not exist")
+        env_config_file = IOUtils.sanitize_path(self.config["EnvironmentConfig"])
+        env_config_file = pathlib.Path(env_config_file)
+        if not env_config_file.exists():
+            print(f'{env_config_file} does not exist')
+            exit()

-        self.env_params = CoverageControl.Parameters(env_config_file)
+        self.env_params = coverage_control.Parameters(env_config_file.as_posix())

         # Initialize variables
         self.dataset_count = 0
@@ -69,12 +121,12 @@ def __init__(self, config_file):
         self.edge_weights = torch.zeros((self.num_dataset, self.num_robots, self.num_robots))

         # Write metrics
-        self.metrics_file = self.data_folder + 'metrics.txt'
+        self.metrics_file = self.dataset_dir_path / 'metrics.txt'
         self.metrics = open(self.metrics_file, 'w')
         # Get current time
         start_time = datetime.datetime.now()
         self.metrics.write('Time: ' + str(datetime.datetime.now()) + '\n')
-        self.metrics.write('Data directory: ' + self.data_dir + '\n')
+        self.metrics.write('Dataset directory: ' + self.dataset_dir + '\n')
         self.PrintTensorSizes()
         self.PrintTensorSizes(self.metrics)
         self.metrics.flush()
@@ -113,16 +165,17 @@ def RunDataGeneration(self):
                converged_data_count += 1

     def StepWithSave(self):
-        converged = not self.alg.Step()
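+        # Compute the next set of actions, then check whether the algorithm has converged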
+        self.alg.ComputeActions()
+        converged = self.alg.IsConverged()
         actions = self.alg.GetActions()
         count = self.dataset_count
-        self.actions[count] = ToTensor(actions)
-        self.robot_positions[count] = CoverageSystemUtils.GetRobotPositions(self.env)
-        self.coverage_features[count] = CoverageSystemUtils.GetVoronoiFeatures(self.env)
-        self.raw_local_maps[self.trigger_count] = CoverageSystemUtils.GetRawLocalMaps(self.env, self.env_params)
-        self.raw_obstacle_maps[self.trigger_count] = CoverageSystemUtils.GetRawObstacleMaps(self.env, self.env_params)
-        self.comm_maps[count] = CoverageSystemUtils.GetCommunicationMaps(self.env, self.env_params, self.cnn_map_size)
-        self.edge_weights[count] = CoverageSystemUtils.GetWeights(self.env, self.env_params)
+        self.actions[count] = CoverageEnvUtils.to_tensor(actions)
+        self.robot_positions[count] = CoverageEnvUtils.get_robot_positions(self.env)
+        self.coverage_features[count] = CoverageEnvUtils.get_voronoi_features(self.env)
+        self.raw_local_maps[self.trigger_count] = CoverageEnvUtils.get_raw_local_maps(self.env, self.env_params)
+        self.raw_obstacle_maps[self.trigger_count] = CoverageEnvUtils.get_raw_obstacle_maps(self.env, self.env_params)
+        self.comm_maps[count] = CoverageEnvUtils.get_communication_maps(self.env, self.env_params, self.cnn_map_size)
+        self.edge_weights[count] = CoverageEnvUtils.get_weights(self.env, self.env_params)
         self.dataset_count += 1
         if self.dataset_count % 100 == 0:
             print(f'Dataset: {self.dataset_count}/{self.num_dataset}')
@@ -141,12 +194,12 @@ def TriggerPostProcessing(self):
         trigger_end_idx = min(self.num_dataset, self.trigger_start_idx + self.trigger_size)
         raw_local_maps = self.raw_local_maps[0:trigger_end_idx - self.trigger_start_idx]
         raw_local_maps = raw_local_maps.to(self.device)
-        resized_local_maps = CoverageSystemUtils.ResizeMaps(raw_local_maps, self.cnn_map_size)
+        resized_local_maps = CoverageEnvUtils.resize_maps(raw_local_maps, self.cnn_map_size)
         self.local_maps[self.trigger_start_idx:trigger_end_idx] = resized_local_maps.view(-1, self.num_robots, self.cnn_map_size, self.cnn_map_size).cpu().clone()

         raw_obstacle_maps = self.raw_obstacle_maps[0:trigger_end_idx - self.trigger_start_idx]
         raw_obstacle_maps = raw_obstacle_maps.to(self.device)
-        resized_obstacle_maps = CoverageSystemUtils.ResizeMaps(raw_obstacle_maps, self.cnn_map_size)
+        resized_obstacle_maps = CoverageEnvUtils.resize_maps(raw_obstacle_maps, self.cnn_map_size)
         self.obstacle_maps[self.trigger_start_idx:trigger_end_idx] = resized_obstacle_maps.view(-1, self.num_robots, self.cnn_map_size, self.cnn_map_size).cpu().clone()

         self.trigger_start_idx = trigger_end_idx
@@ -176,23 +229,24 @@ def SaveTensor(self, tensor, name, as_sparse=False):
             validation_tensor = validation_tensor.to_sparse()
             test_tensor = test_tensor.to_sparse()

-        torch.save(train_tensor, self.data_folder + '/train/' + name)
-        torch.save(validation_tensor, self.data_folder + '/val/' + name)
-        torch.save(test_tensor, self.data_folder + '/test/' + name)
+        dataset_dir_path = pathlib.Path(self.dataset_dir)
+        torch.save(train_tensor, dataset_dir_path / 'train/' / name)
+        torch.save(validation_tensor, dataset_dir_path / 'val/' / name)
+        torch.save(test_tensor, dataset_dir_path / 'test/' / name)

     def SaveDataset(self):
         as_sparse = self.config['SaveAsSparseQ']
-        self.train_size = int(self.num_dataset * self.config['DatasetSplit']['TrainRatio'])
-        self.validation_size = int(self.num_dataset * self.config['DatasetSplit']['ValRatio'])
+        self.train_size = int(self.num_dataset * self.config['DataSetSplit']['TrainRatio'])
+        self.validation_size = int(self.num_dataset * self.config['DataSetSplit']['ValRatio'])
         self.test_size = self.num_dataset - self.train_size - self.validation_size

         # Make sure the folder exists
-        if not os.path.exists(self.data_folder + '/train'):
-            os.makedirs(self.data_folder + '/train')
-        if not os.path.exists(self.data_folder + '/val'):
-            os.makedirs(self.data_folder + '/val')
-        if not os.path.exists(self.data_folder + '/test'):
-            os.makedirs(self.data_folder + '/test')
+        if not os.path.exists(self.dataset_dir + '/train'):
+            os.makedirs(self.dataset_dir + '/train')
+        if not os.path.exists(self.dataset_dir + '/val'):
+            os.makedirs(self.dataset_dir + '/val')
+        if not os.path.exists(self.dataset_dir + '/test'):
+            os.makedirs(self.dataset_dir + '/test')

         self.SaveTensor(self.robot_positions, 'robot_positions.pt')
         self.SaveTensor(self.local_maps, 'local_maps.pt', as_sparse)
@@ -201,8 +255,8 @@ def SaveDataset(self):

         # min_val, range_val = self.NormalizeCommunicationMaps()
         self.SaveTensor(self.comm_maps, 'comm_maps.pt', as_sparse)
-        # torch.save(min_val, self.data_folder + '/comm_maps_min.pt')
-        # torch.save(range_val, self.data_folder + '/comm_maps_range.pt')
+        # torch.save(min_val, self.dataset_dir / 'comm_maps_min.pt')
+        # torch.save(range_val, self.dataset_dir / 'comm_maps_range.pt')

         self.SaveTensor(self.actions, 'actions.pt')
         self.SaveTensor(self.coverage_features, 'coverage_features.pt')
@@ -212,16 +266,18 @@ def SaveDataset(self):
         coverage_features, coverage_features_mean, coverage_features_std = self.NormalizeTensor(self.coverage_features)
         self.SaveTensor(normalized_actions, 'normalized_actions.pt')
         self.SaveTensor(coverage_features, 'normalized_coverage_features.pt')
-        torch.save(actions_mean, self.data_folder + '/actions_mean.pt')
-        torch.save(actions_std, self.data_folder + '/actions_std.pt')
-        torch.save(coverage_features_mean, self.data_folder + '/coverage_features_mean.pt')
-        torch.save(coverage_features_std, self.data_folder + '/coverage_features_std.pt')
+        torch.save(actions_mean, self.dataset_dir_path / 'actions_mean.pt')
+        torch.save(actions_std, self.dataset_dir_path / 'actions_std.pt')
+        torch.save(coverage_features_mean, self.dataset_dir_path / 'coverage_features_mean.pt')
+        torch.save(coverage_features_std, self.dataset_dir_path / 'coverage_features_std.pt')


     def StepWithoutSave(self):
-        converged = not self.alg.Step()
-        error_flag = self.env.StepActions(self.alg.GetActions())
-        return converged or error_flag
+        self.alg.ComputeActions()
+        converged = self.alg.IsConverged()
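+        # StepActions() returns an error flag; treat a failed step as termination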
+        if self.env.StepActions(self.alg.GetActions()):
+            return True
+        return converged

     def GetTensorByteSizeMB(self, tensor):
         return (tensor.element_size() * tensor.nelement()) / (1024 * 1024)
@@ -240,6 +296,8 @@ def PrintTensorSizes(self, file=sys.stdout):

 if __name__ == '__main__':
     config_file = sys.argv[1]
+    if len(sys.argv) > 2:
+        append_folder = sys.argv[2]
+    else:
+        append_folder = None
-    DatasetGenerator(config_file)
+    DatasetGenerator(config_file, append_folder)
-
-