
Commit 47442b6

Update EIS sparse and dense embedding max batch size to 16 (#132646) (#132661)
1 parent 1c51c9e · commit 47442b6

File tree

2 files changed: +13, -3 lines


docs/changelog/132646.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+pr: 132646
+summary: Update EIS sparse and dense embedding max batch size to 16
+area: Machine Learning
+type: bug
+issues: []

x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java

Lines changed: 8 additions & 3 deletions
@@ -89,7 +89,10 @@ public class ElasticInferenceService extends SenderService {
     public static final String NAME = "elastic";
     public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service";
     public static final Integer DENSE_TEXT_EMBEDDINGS_DIMENSIONS = 1024;
-    public static final Integer SPARSE_TEXT_EMBEDDING_MAX_BATCH_SIZE = 512;
+    // The maximum batch size for sparse text embeddings is set to 16.
+    // This value was reduced from 512 due to memory constraints; batch sizes above 32 can cause GPU out-of-memory errors.
+    // A batch size of 16 provides optimal throughput and stability, especially on lower-tier instance types.
+    public static final Integer SPARSE_TEXT_EMBEDDING_MAX_BATCH_SIZE = 16;

     private static final EnumSet<TaskType> IMPLEMENTED_TASK_TYPES = EnumSet.of(
         TaskType.SPARSE_EMBEDDING,
@@ -99,8 +102,10 @@ public class ElasticInferenceService extends SenderService {
     );
     private static final String SERVICE_NAME = "Elastic";

-    // TODO: check with team, what makes the most sense
-    private static final Integer DENSE_TEXT_EMBEDDINGS_MAX_BATCH_SIZE = 32;
+    // TODO: revisit this value once EIS supports dense models
+    // The maximum batch size for dense text embeddings is proactively set to 16.
+    // This mirrors the memory constraints observed with sparse embeddings
+    private static final Integer DENSE_TEXT_EMBEDDINGS_MAX_BATCH_SIZE = 16;

     // rainbow-sprinkles
     static final String DEFAULT_CHAT_COMPLETION_MODEL_ID_V1 = "rainbow-sprinkles";
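For context, here is a minimal, hypothetical sketch (not the actual ElasticInferenceService code path) of how a max-batch-size constant like the new value of 16 is typically applied: input documents are split into chunks of at most that size before an embedding request is dispatched, so lowering the constant directly caps the size, and therefore the memory footprint, of any single request. The class name BatchingSketch and the partition helper below are illustrative assumptions, not code from this commit.

// Illustrative only: a hypothetical helper, not the Elasticsearch implementation.
import java.util.ArrayList;
import java.util.List;

public class BatchingSketch {

    // Mirrors the constant introduced in this commit; the real field lives in ElasticInferenceService.
    static final int SPARSE_TEXT_EMBEDDING_MAX_BATCH_SIZE = 16;

    // Split the inputs into ordered sublists of at most maxBatchSize elements each.
    static List<List<String>> partition(List<String> inputs, int maxBatchSize) {
        List<List<String>> batches = new ArrayList<>();
        for (int start = 0; start < inputs.size(); start += maxBatchSize) {
            int end = Math.min(start + maxBatchSize, inputs.size());
            batches.add(new ArrayList<>(inputs.subList(start, end)));
        }
        return batches;
    }

    public static void main(String[] args) {
        List<String> inputs = new ArrayList<>();
        for (int i = 0; i < 40; i++) {
            inputs.add("document " + i);
        }
        // 40 inputs with a cap of 16 yields batches of 16, 16, and 8.
        for (List<String> batch : partition(inputs, SPARSE_TEXT_EMBEDDING_MAX_BATCH_SIZE)) {
            System.out.println("batch size = " + batch.size());
        }
    }
}

Running the sketch prints batch sizes of 16, 16, and 8 for 40 inputs, illustrating how the lower cap bounds the largest single embedding request.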
