Skip to content

Commit d7331f9

Browse files
author
annie-mac
committed
fix compiling
1 parent 860ea64 commit d7331f9

File tree

2 files changed: +13 additions, −9 deletions

sdk/cosmos/azure-cosmos-spark_3-4_2-12/src/main/scala/com/azure/cosmos/spark/ChangeFeedMicroBatchStream.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ private class ChangeFeedMicroBatchStream
   private var latestOffsetSnapshot: Option[ChangeFeedOffset] = None

   private val partitionIndex = new AtomicLong(0)
-  private val partitionIndexMap = Maps.synchronizedBiMap(new HashBiMap[NormalizedRange, Long]())
+  private val partitionIndexMap = Maps.synchronizedBiMap(HashBiMap.create[NormalizedRange, Long]())
   private val partitionMetricsMap = new ConcurrentHashMap[NormalizedRange, ChangeFeedMetricsTracker]()

   session.sparkContext.addSparkListener(new ChangeFeedMetricsListener(partitionIndexMap, partitionMetricsMap))

sdk/cosmos/azure-cosmos-spark_3-5_2-12/src/main/scala/com/azure/cosmos/spark/SparkInternalsBridge.scala

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
 import com.azure.cosmos.implementation.guava25.base.MoreObjects.firstNonNull
 import com.azure.cosmos.implementation.guava25.base.Strings.emptyToNull
 import com.azure.cosmos.spark.diagnostics.BasicLoggingTrait
 import org.apache.spark.TaskContext
+import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.sql.execution.metric.SQLMetric
 import org.apache.spark.util.AccumulatorV2

@@ -40,20 +41,23 @@ object SparkInternalsBridge extends BasicLoggingTrait {
   private final lazy val reflectionAccessAllowed = new AtomicBoolean(getSparkReflectionAccessAllowed)

   def getInternalCustomTaskMetricsAsSQLMetric(knownCosmosMetricNames: Set[String]) : Map[String, SQLMetric] = {
+    Option.apply(TaskContext.get()) match {
+      case Some(taskCtx) => getInternalCustomTaskMetricsAsSQLMetric(knownCosmosMetricNames, taskCtx.taskMetrics())
+      case None => Map.empty[String, SQLMetric]
+    }
+  }
+
+  def getInternalCustomTaskMetricsAsSQLMetric(knownCosmosMetricNames: Set[String], taskMetrics: TaskMetrics) : Map[String, SQLMetric] = {

     if (!reflectionAccessAllowed.get) {
       Map.empty[String, SQLMetric]
     } else {
-      Option.apply(TaskContext.get()) match {
-        case Some(taskCtx) => getInternalCustomTaskMetricsAsSQLMetricInternal(knownCosmosMetricNames, taskCtx)
-        case None => Map.empty[String, SQLMetric]
-      }
+      getInternalCustomTaskMetricsAsSQLMetricInternal(knownCosmosMetricNames, taskMetrics)
     }
   }

-  private def getAccumulators(taskCtx: TaskContext): Option[Seq[AccumulatorV2[_, _]]] = {
+  private def getAccumulators(taskMetrics: TaskMetrics): Option[Seq[AccumulatorV2[_, _]]] = {
     try {
-      val taskMetrics: Object = taskCtx.taskMetrics()
       val method = Option(accumulatorsMethod.get) match {
         case Some(existing) => existing
         case None =>
@@ -78,8 +82,8 @@ object SparkInternalsBridge extends BasicLoggingTrait {

   private def getInternalCustomTaskMetricsAsSQLMetricInternal(
     knownCosmosMetricNames: Set[String],
-    taskCtx: TaskContext): Map[String, SQLMetric] = {
-    getAccumulators(taskCtx) match {
+    taskMetrics: TaskMetrics): Map[String, SQLMetric] = {
+    getAccumulators(taskMetrics) match {
       case Some(accumulators) => accumulators
         .filter(accumulable => accumulable.isInstanceOf[SQLMetric]
           && accumulable.name.isDefined

Comments (0)