6 files changed (+7, -7 lines) under mllib/src/main/scala/org/apache/spark/ml.

@@ -484,7 +484,7 @@ class LogisticRegression @Since("1.2.0") (
  }

  override protected[spark] def train(dataset: Dataset[_]): LogisticRegressionModel = {
-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    train(dataset, handlePersistence)
  }
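All six files make the same one-line change: the persistence check switches from dataset.rdd.getStorageLevel to dataset.storageLevel. Calling .rdd on a Dataset builds a fresh, unpersisted RDD, so the old check reports StorageLevel.NONE even when the caller has already cached the Dataset, and the estimator would cache the data a second time. A small self-contained illustration of the difference (not part of the diff; the session setup and toy data are hypothetical, and it assumes Spark 2.1+ where Dataset.storageLevel is available):

import org.apache.spark.sql.SparkSession
import org.apache.spark.storage.StorageLevel

val spark = SparkSession.builder().master("local[*]").appName("storage-level-check").getOrCreate()
import spark.implicits._

// Caller persists the training data before handing it to an estimator.
val training = Seq((1.0, 0.5), (0.0, 1.5)).toDF("label", "feature")
  .persist(StorageLevel.MEMORY_AND_DISK)

// New check used in this diff: reflects the Dataset's own caching status.
assert(training.storageLevel != StorageLevel.NONE)

// Old check: Dataset.rdd builds a fresh, unpersisted RDD, so it reports NONE
// here even though the Dataset is cached, and the estimator would re-cache it.
assert(training.rdd.getStorageLevel == StorageLevel.NONE)

spark.stop()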
@@ -165,7 +165,7 @@ final class OneVsRestModel private[ml] (
    val newDataset = dataset.withColumn(accColName, initUDF())

    // persist if underlying dataset is not persistent.
-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) {
      newDataset.persist(StorageLevel.MEMORY_AND_DISK)
    }
@@ -358,7 +358,7 @@ final class OneVsRest @Since("1.4.0") (
    }

    // persist if underlying dataset is not persistent.
-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) {
      multiclassLabeled.persist(StorageLevel.MEMORY_AND_DISK)
    }
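The two OneVsRest hunks above, and the remaining files below, share the same shape: decide once whether the input is already cached, persist an intermediate Dataset or RDD only if it is not, and unpersist it again after fitting (the unpersist side falls outside these hunks). A minimal sketch of that pattern, assuming Spark 2.1+; the helper name withOptionalCaching is hypothetical and not code from this change:

import org.apache.spark.sql.Dataset
import org.apache.spark.storage.StorageLevel

// Hypothetical helper sketching the handlePersistence pattern used in these files.
def withOptionalCaching[T, R](dataset: Dataset[T])(body: Dataset[T] => R): R = {
  // Cache only if the caller has not already persisted the dataset.
  val handlePersistence = dataset.storageLevel == StorageLevel.NONE
  if (handlePersistence) dataset.persist(StorageLevel.MEMORY_AND_DISK)
  try {
    body(dataset)
  } finally {
    if (handlePersistence) dataset.unpersist()
  }
}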
@@ -304,7 +304,7 @@ class KMeans @Since("1.5.0") (
  override def fit(dataset: Dataset[_]): KMeansModel = {
    transformSchema(dataset.schema, logging = true)

-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    val instances: RDD[OldVector] = dataset.select(col($(featuresCol))).rdd.map {
      case Row(point: Vector) => OldVectors.fromML(point)
    }
@@ -213,7 +213,7 @@ class AFTSurvivalRegression @Since("1.6.0") (@Since("1.6.0") override val uid: S
  override def fit(dataset: Dataset[_]): AFTSurvivalRegressionModel = {
    transformSchema(dataset.schema, logging = true)
    val instances = extractAFTPoints(dataset)
-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)

    val featuresSummarizer = {
@@ -165,7 +165,7 @@ class IsotonicRegression @Since("1.5.0") (@Since("1.5.0") override val uid: Stri
    transformSchema(dataset.schema, logging = true)
    // Extract columns from data. If dataset is persisted, do not persist oldDataset.
    val instances = extractWeightedLabeledPoints(dataset)
-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)

    val instr = Instrumentation.create(this, dataset)
@@ -251,7 +251,7 @@ class LinearRegression @Since("1.3.0") (@Since("1.3.0") override val uid: String
      return lrModel
    }

-   val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
+   val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)

    val (featuresSummarizer, ySummarizer) = {