Skip to content

Commit c8411cb

Browse files
committed
formatting
1 parent 817c1c0 commit c8411cb

File tree

2 files changed

+23
-27
lines changed

2 files changed

+23
-27
lines changed

x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ static void copyAssignments(AssignmentPlan source, AssignmentPlan.Builder dest,
138138
Map<AssignmentPlan.Node, Integer> sourceNodeAssignments = source.assignments(deployment).orElse(Map.of());
139139
for (Map.Entry<AssignmentPlan.Node, Integer> sourceAssignment : sourceNodeAssignments.entrySet()) {
140140
AssignmentPlan.Node node = originalNodeById.get(sourceAssignment.getKey().id());
141-
if(dest.canAssign(deployment, node, sourceAssignment.getValue())) {
141+
if (dest.canAssign(deployment, node, sourceAssignment.getValue())) {
142142
dest.assignModelToNode(deployment, node, sourceAssignment.getValue());
143143
}
144144
}
@@ -320,8 +320,10 @@ private Map<List<String>, List<AssignmentPlan.Node>> createNodesByZoneMap() {
320320
}
321321

322322
private static long getNodeFreeMemoryExcludingPerNodeOverheadAndNativeInference(NodeLoad load) {
323-
// load.getFreeMemoryExcludingPerNodeOverhead() = maxMemory - assignedJobMemoryExcludingPerNodeOverhead - 30MB native executable code overhead
324-
// assignedJobMemoryExcludingPerNodeOverhead = assignedAnomalyDetectorMemory + assignedDataFrameAnalyticsMemory + assignedNativeInferenceMemory
323+
// load.getFreeMemoryExcludingPerNodeOverhead() = maxMemory - assignedJobMemoryExcludingPerNodeOverhead - 30MB native executable
324+
// code overhead
325+
// assignedJobMemoryExcludingPerNodeOverhead = assignedAnomalyDetectorMemory + assignedDataFrameAnalyticsMemory +
326+
// assignedNativeInferenceMemory
325327
// load.getAssignedNativeInferenceMemory() = assignedNativeInferenceMemory
326328
// TODO: (valeriy) assignedNativeInferenceMemory is double counted in the current calculation.
327329
return load.getFreeMemoryExcludingPerNodeOverhead()/* - load.getAssignedNativeInferenceMemory()*/;
@@ -412,17 +414,20 @@ private Optional<String> explainAssignment(
412414
if (Strings.isNullOrEmpty(load.getError()) == false) {
413415
return Optional.of(load.getError());
414416
}
415-
// TODO (valeriy): this test should be actually true, but it is false, because we use the "naked" deployment footprint
416-
// Get existing allocations for this node to avoid double counting
417417
int existingAllocationsOnNode = assignmentPlan.assignments(deployment)
418-
.flatMap(assignments -> assignments.entrySet().stream()
419-
.filter(entry -> entry.getKey().id().equals(node.getId()))
420-
.findFirst()
421-
.map(Map.Entry::getValue))
418+
.flatMap(
419+
assignments -> assignments.entrySet()
420+
.stream()
421+
.filter(entry -> entry.getKey().id().equals(node.getId()))
422+
.findFirst()
423+
.map(Map.Entry::getValue)
424+
)
422425
.orElse(0);
423426
int notYetAssignedAllocations = deployment.allocations() - assignmentPlan.totalAllocations(deployment);
424-
// if (deployment.estimateMemoryUsageBytes(deployment.allocations() - existingAllocationsOnNode) > assignmentPlan.getRemainingNodeMemory(node.getId())) {
425-
if (deployment.estimateAdditionalMemoryUsageBytes(existingAllocationsOnNode, existingAllocationsOnNode + notYetAssignedAllocations) > assignmentPlan.getRemainingNodeMemory(node.getId())) {
427+
if (deployment.estimateAdditionalMemoryUsageBytes(
428+
existingAllocationsOnNode,
429+
existingAllocationsOnNode + notYetAssignedAllocations
430+
) > assignmentPlan.getRemainingNodeMemory(node.getId())) {
426431
// If any ML processes are running on a node we require some space to load the shared libraries.
427432
// So if none are currently running then this per-node overhead must be added to the requirement.
428433
// From node load we know if we had any jobs or models assigned before the rebalance.

x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java

Lines changed: 7 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1268,26 +1268,18 @@ public void testRebalance_GivenDeploymentWithMemoryRequirements_ConsidersNativeE
12681268
long modelMemory = ByteSizeValue.ofMb(200).getBytes();
12691269
long memoryOverhead = ByteSizeValue.ofMb(240).getBytes();
12701270
long JVMOverhead = ByteSizeValue.ofMb(50).getBytes();
1271-
long nodeMemory = memoryOverhead + modelMemory*2 + JVMOverhead;
1271+
long nodeMemory = memoryOverhead + modelMemory * 2 + JVMOverhead;
12721272

12731273
DiscoveryNode node = buildNode("node-1", nodeMemory, 4);
12741274

12751275
String deploymentId = "model-with-overhead-test";
1276-
StartTrainedModelDeploymentAction.TaskParams taskParams = normalPriorityParams(
1277-
deploymentId,
1278-
deploymentId,
1279-
modelMemory,
1280-
1,
1281-
1
1282-
);
1276+
StartTrainedModelDeploymentAction.TaskParams taskParams = normalPriorityParams(deploymentId, deploymentId, modelMemory, 1, 1);
12831277

12841278
TrainedModelAssignmentMetadata currentMetadata = TrainedModelAssignmentMetadata.Builder.empty().build();
12851279
Map<DiscoveryNode, NodeLoad> nodeLoads = new HashMap<>();
12861280

12871281
// This node has no jobs or models yet, so the overhead should be accounted for
1288-
nodeLoads.put(node, NodeLoad.builder("node-1")
1289-
.setMaxMemory(nodeMemory)
1290-
.build());
1282+
nodeLoads.put(node, NodeLoad.builder("node-1").setMaxMemory(nodeMemory).build());
12911283

12921284
TrainedModelAssignmentMetadata result = new TrainedModelAssignmentRebalancer(
12931285
currentMetadata,
@@ -1310,18 +1302,17 @@ public void testRebalance_GivenDeploymentWithMemoryRequirements_ConsidersNativeE
13101302
DiscoveryNode insufficientNode = buildNode("node-2", insufficientNodeMemory, 4);
13111303

13121304
Map<DiscoveryNode, NodeLoad> insufficientNodeLoads = Map.of(
1313-
insufficientNode, NodeLoad.builder("node-2")
1314-
.setMaxMemory(insufficientNodeMemory)
1315-
.build()
1305+
insufficientNode,
1306+
NodeLoad.builder("node-2").setMaxMemory(insufficientNodeMemory).build()
13161307
);
13171308

13181309
TrainedModelAssignmentMetadata insufficientResult = new TrainedModelAssignmentRebalancer(
13191310
TrainedModelAssignmentMetadata.Builder.empty().build(),
13201311
insufficientNodeLoads,
13211312
Map.of(List.of(), List.of(insufficientNode)),
13221313
Optional.of(new CreateTrainedModelAssignmentAction.Request(taskParams, null)),
1323-
1)
1324-
.rebalance().build();
1314+
1
1315+
).rebalance().build();
13251316

13261317
TrainedModelAssignment insufficientAssignment = insufficientResult.getDeploymentAssignment(deploymentId);
13271318
assertThat(insufficientAssignment, is(notNullValue()));

0 commit comments

Comments (0)