src/main/scala/ldbc/snb/datagen/io
1 file changed: +2 −2 lines changed

@@ -92,7 +92,7 @@ object graphs {
   SparkUI.job(getClass.getSimpleName, s"write $tpe") {
     val p = (sink.path / "graphs" / sink.format / PathComponent[GraphLike[M]].path(self) / tpe.entityPath).toString
     log.info(s"$tpe: Writing started")
-    val opts = getFormatOptions(sink.format, self.mode)
+    val opts = getFormatOptions(sink.format, self.mode, sink.formatOptions)
     the(dataset).write(DataFrameSink(p, sink.format, opts, SaveMode.Ignore))
     log.info(s"$tpe: Writing completed")
   }(dataset.sparkSession)
@@ -109,7 +109,7 @@ object graphs {
   import CacheFriendlyEntityOrdering._

   override def write(self: Graph[M], sink: GraphSink): Unit = {
-    val opts = getFormatOptions(sink.format, self.mode)
+    val opts = getFormatOptions(sink.format, self.mode, sink.formatOptions)
     TreeMap(self.entities.mapValues(ev).toSeq: _*).foreach {
       case (tpe, BatchedEntity(snapshot, insertBatches, deleteBatches)) =>
         SparkUI.job(getClass.getSimpleName, s"write $tpe snapshot") {
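
Both hunks make the same change: the sink's formatOptions are now passed to getFormatOptions alongside the output format and the graph mode. The definition of getFormatOptions is not part of this diff, so the following is a minimal, hypothetical sketch (the real signature, parameter types, and defaults in ldbc.snb.datagen may differ) of how such a helper could merge per-format defaults with caller-supplied options so that sink.formatOptions takes precedence:

// Hypothetical sketch only, not the datagen implementation: illustrates
// merging per-format defaults with a sink-level formatOptions map.
object FormatOptionsSketch {
  def getFormatOptions(
      format: String,
      mode: String,                              // placeholder for the graph mode type
      formatOptions: Map[String, String] = Map.empty
  ): Map[String, String] = {
    // Assumed per-format defaults; the actual defaults are not shown in this diff.
    val defaults = format match {
      case "csv" => Map("header" -> "true", "sep" -> "|")
      case _     => Map.empty[String, String]
    }
    // Caller-supplied options (here: sink.formatOptions) override the defaults.
    defaults ++ formatOptions
  }
}

Under that assumption, both call sites behave as before when sink.formatOptions is empty, while allowing writer-specific options (for example a custom CSV separator) to override the per-format defaults.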