5 files changed: +41 −2
JavaUtils (java/org/apache/spark/network/util):

@@ -78,6 +78,31 @@ public static void deleteQuietly(File file) {
     }
   }

+  /** Registers the file or directory for deletion when the JVM exits. */
+  public static void forceDeleteOnExit(File file) throws IOException {
+    if (file != null && file.exists()) {
+      if (!file.isDirectory()) {
+        file.deleteOnExit();
+      } else {
+        Path path = file.toPath();
+        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
+          @Override
+          public FileVisitResult preVisitDirectory(Path p, BasicFileAttributes a)
+              throws IOException {
+            p.toFile().deleteOnExit();
+            return a.isSymbolicLink() ? FileVisitResult.SKIP_SUBTREE : FileVisitResult.CONTINUE;
+          }
+
+          @Override
+          public FileVisitResult visitFile(Path p, BasicFileAttributes a) throws IOException {
+            p.toFile().deleteOnExit();
+            return FileVisitResult.CONTINUE;
+          }
+        });
+      }
+    }
+  }
+
   /** Move a file from src to dst. */
   public static void moveFile(File src, File dst) throws IOException {
     if (src == null || dst == null || !src.exists() || src.isDirectory() || dst.exists()) {
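The walk order is what makes the recursive case correct: java.io.File.deleteOnExit replays registrations in reverse at normal JVM shutdown, so registering each directory in preVisitDirectory, before any of its contents, means the children are deleted first and the directory is already empty when its own turn comes (File.delete cannot remove a non-empty directory). Returning SKIP_SUBTREE for symbolic links keeps the walk from registering paths reached through a link. A minimal usage sketch, assuming a throwaway temp directory (class and file names here are illustrative, not from the patch):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import org.apache.spark.network.util.JavaUtils;

public class ForceDeleteOnExitDemo {
  public static void main(String[] args) throws IOException {
    // Build a small scratch tree that should not survive the JVM.
    File scratch = Files.createTempDirectory("scratch-").toFile();
    File logs = new File(scratch, "logs");
    logs.mkdir();
    new File(logs, "op.log").createNewFile();

    // One walk registers deleteOnExit() for every path: scratch before logs,
    // logs before op.log. The reverse replay at shutdown then deletes
    // op.log, then logs, then scratch.
    JavaUtils.forceDeleteOnExit(scratch);
  }
}

As with File.deleteOnExit itself, the cleanup only runs on orderly termination; a killed JVM leaves the tree behind.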
SparkFileUtils (scala/org/apache/spark/util):

@@ -153,6 +153,11 @@ private[spark] trait SparkFileUtils extends Logging {
     JavaUtils.deleteQuietly(file)
   }

+  /** Registers the file or directory for deletion when the JVM exits. */
+  def forceDeleteOnExit(file: File): Unit = {
+    JavaUtils.forceDeleteOnExit(file)
+  }
+
   def getFile(names: String*): File = {
     require(names != null && names.forall(_ != null))
     names.tail.foldLeft(Path.of(names.head)) { (path, part) =>
Checkstyle configuration:

@@ -215,6 +215,10 @@
     <property name="format" value="FileUtils\.forceDelete\(" />
     <property name="message" value="Use deleteRecursively of JavaUtils/SparkFileUtils/Utils instead." />
   </module>
+  <module name="RegexpSinglelineJava">
+    <property name="format" value="FileUtils\.forceDeleteOnExit\(" />
+    <property name="message" value="Use forceDeleteOnExit of JavaUtils/SparkFileUtils/Utils instead." />
+  </module>
   <module name="RegexpSinglelineJava">
     <property name="format" value="FileUtils\.deleteQuietly" />
     <property name="message" value="Use deleteQuietly of JavaUtils/SparkFileUtils/Utils instead." />
Scalastyle configuration:

@@ -322,6 +322,11 @@ This file is divided into 3 sections:
   <customMessage>Use deleteRecursively of JavaUtils/SparkFileUtils/Utils</customMessage>
 </check>

+<check customId="forceDeleteOnExit" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+  <parameters><parameter name="regex">\bFileUtils\.forceDeleteOnExit\b</parameter></parameters>
+  <customMessage>Use forceDeleteOnExit of JavaUtils/SparkFileUtils/Utils instead.</customMessage>
+</check>
+
 <check customId="deleteQuietly" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
   <parameters><parameter name="regex">\bFileUtils\.deleteQuietly\b</parameter></parameters>
   <customMessage>Use deleteQuietly of JavaUtils/SparkFileUtils/Utils</customMessage>
SessionManager (sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/session):

@@ -28,7 +28,6 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;

-import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.service.CompositeService;
@@ -43,6 +42,7 @@
 import org.apache.spark.internal.SparkLoggerFactory;
 import org.apache.spark.internal.LogKeys;
 import org.apache.spark.internal.MDC;
+import org.apache.spark.network.util.JavaUtils;
 import org.apache.spark.util.Utils;

 /**
@@ -138,7 +138,7 @@ private void initOperationLogRootDir() {
     LOG.info("Operation log root directory is created: {}",
       MDC.of(LogKeys.PATH, operationLogRootDir.getAbsolutePath()));
     try {
-      FileUtils.forceDeleteOnExit(operationLogRootDir);
+      JavaUtils.forceDeleteOnExit(operationLogRootDir);
     } catch (IOException e) {
       LOG.warn("Failed to schedule cleanup HS2 operation logging root dir: {}", e,
         MDC.of(LogKeys.PATH, operationLogRootDir.getAbsolutePath()));