 shutil.rmtree(tmp, ignore_errors=True)
 os.makedirs(tmp, exist_ok=True)
 os.makedirs(os.path.join(now_dir, "logs"), exist_ok=True)
-os.makedirs(os.path.join(now_dir, "assets/weights"), exist_ok=True)
+os.makedirs(os.path.join(now_dir, "assets", "weights"), exist_ok=True)
 os.environ["TEMP"] = tmp
 warnings.filterwarnings("ignore")
 torch.manual_seed(114514)
@@ -142,20 +142,22 @@ def forward_dml(ctx, x, scale):
 outside_index_root = os.getenv("outside_index_root")
 
 names = []
-for name in os.listdir(weight_root):
-    if name.endswith(".pth"):
-        names.append(name)
 index_paths = []
 
+def lookup_names(weight_root):
+    global names
+    for name in os.listdir(weight_root):
+        if name.endswith(".pth"):
+            names.append(name)
 
 def lookup_indices(index_root):
     global index_paths
-    for root, dirs, files in os.walk(index_root, topdown=False):
+    for root, _, files in os.walk(index_root, topdown=False):
         for name in files:
             if name.endswith(".index") and "trained" not in name:
-                index_paths.append("%s/%s" % (root, name))
-
+                index_paths.append(str(pathlib.Path(root, name)))
 
+lookup_names(weight_root)
 lookup_indices(index_root)
 lookup_indices(outside_index_root)
 uvr5_names = []
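The hunk above pulls model and index discovery into the `lookup_names` and `lookup_indices` helpers and switches index paths from manual `"%s/%s"` formatting to `pathlib.Path`, so stored paths use the host OS's separator. A minimal standalone sketch of that difference (the `root`/`name` values are illustrative, and `PureWindowsPath` is used only so the Windows result is visible on any platform):

```python
import pathlib

# Illustrative values: os.walk() on Windows yields backslash-separated roots.
root = r"logs\my-voice"
name = "added_IVF256_Flat_nprobe_1.index"

# Old style: "/" is hard-coded, so the result mixes separators on Windows.
print("%s/%s" % (root, name))               # logs\my-voice/added_IVF256_Flat_nprobe_1.index

# New style: Path joins with the platform's native separator.
# (PureWindowsPath is used here purely to demonstrate the Windows behaviour.)
print(pathlib.PureWindowsPath(root, name))  # logs\my-voice\added_IVF256_Flat_nprobe_1.index
```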
@@ -165,15 +167,12 @@ def lookup_indices(index_root):
 
 
 def change_choices():
+    global index_paths, names
     names = []
-    for name in os.listdir(weight_root):
-        if name.endswith(".pth"):
-            names.append(name)
+    lookup_names(weight_root)
     index_paths = []
-    for root, dirs, files in os.walk(index_root, topdown=False):
-        for name in files:
-            if name.endswith(".index") and "trained" not in name:
-                index_paths.append("%s/%s" % (root, name))
+    lookup_indices(index_root)
+    lookup_indices(outside_index_root)
     return {"choices": sorted(names), "__type__": "update"}, {
         "choices": sorted(index_paths),
         "__type__": "update",
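`change_choices()` now rebuilds both caches through the same helpers and also rescans `outside_index_root`. This function is typically bound to a refresh control in the WebUI, and the two dicts it returns are consumed by Gradio as dropdown updates; a hedged wiring sketch (component names here are illustrative, not taken from this commit):

```python
import gradio as gr

# Illustrative wiring only: the real UI defines its own components elsewhere
# in infer-web.py. The two update dicts returned by change_choices() are
# applied to these two dropdowns when the button is pressed.
with gr.Blocks() as demo:
    model_dropdown = gr.Dropdown(label="Model (.pth)", choices=sorted(names))
    index_dropdown = gr.Dropdown(label="Feature index (.index)", choices=sorted(index_paths))
    refresh_button = gr.Button("Refresh")
    refresh_button.click(change_choices, inputs=[], outputs=[model_dropdown, index_dropdown])
```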
@@ -223,16 +222,17 @@ def if_done_multi(done, ps):
 
 def preprocess_dataset(trainset_dir, exp_dir, sr, n_p):
     sr = sr_dict[sr]
-    os.makedirs("%s/logs/%s" % (now_dir, exp_dir), exist_ok=True)
-    f = open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "w")
+    exp_path = pathlib.Path(now_dir, "logs", exp_dir)
+    os.makedirs(exp_path, exist_ok=True)
+    log_file_path = exp_path / "preprocess.log"
+    f = open(log_file_path, "w")
     f.close()
-    cmd = '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s/logs/%s" %s %.1f' % (
+    cmd = '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s" %s %.1f' % (
         config.python_cmd,
         trainset_dir,
         sr,
         n_p,
-        now_dir,
-        exp_dir,
+        str(exp_path),
         config.noparallel,
         config.preprocess_per,
     )
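With `exp_path` pre-joined via `pathlib.Path`, the preprocess command now receives a single quoted log directory instead of interpolating `now_dir` and `exp_dir` around a hard-coded `/logs/`. A hedged sketch of what the assembled command looks like (all values are illustrative stand-ins for `config.*` and the function arguments):

```python
import pathlib

now_dir = r"C:\RVC"
exp_path = pathlib.Path(now_dir, "logs", "my-voice")

cmd = '"%s" infer/modules/train/preprocess.py "%s" %s %s "%s" %s %.1f' % (
    "runtime\\python.exe",     # stand-in for config.python_cmd
    r"C:\datasets\my-voice",   # trainset_dir
    40000,                     # sr after the sr_dict lookup (e.g. "40k" -> 40000)
    8,                         # n_p: number of preprocessing processes
    str(exp_path),             # the experiment log dir, now one pre-joined path
    False,                     # stand-in for config.noparallel
    3.0,                       # stand-in for config.preprocess_per
)
print(cmd)
# On Windows this prints:
# "runtime\python.exe" infer/modules/train/preprocess.py "C:\datasets\my-voice" 40000 8 "C:\RVC\logs\my-voice" False 3.0
```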
@@ -249,12 +249,12 @@ def preprocess_dataset(trainset_dir, exp_dir, sr, n_p):
         ),
     ).start()
     while 1:
-        with open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "r") as f:
+        with open(log_file_path, "r") as f:
             yield (f.read())
         sleep(1)
         if done[0]:
             break
-    with open("%s/logs/%s/preprocess.log" % (now_dir, exp_dir), "r") as f:
+    with open(log_file_path, "r") as f:
         log = f.read()
     logger.info(log)
     yield log