@@ -95,7 +95,7 @@ private InferenceFeatureSetUsage collectUsage(List<ModelConfigurations> endpoint
             mapInferenceFieldsByIndexServiceAndTask(indicesMetadata, endpoints);
         Map<String, ModelStats> endpointStats = new TreeMap<>();
         addStatsByServiceAndTask(inferenceFieldsByIndexServiceAndTask, endpoints, endpointStats);
-        addStatsForDefaultModels(inferenceFieldsByIndexServiceAndTask, endpoints, endpointStats);
+        addStatsForDefaultModelsCompatibleWithSemanticText(inferenceFieldsByIndexServiceAndTask, endpoints, endpointStats);
         return new InferenceFeatureSetUsage(endpointStats.values());
     }

@@ -159,10 +159,10 @@ private static void addStatsByServiceAndTask(
                 endpointStats.get(serviceAndTaskType.toString())
             )
         );
-        addTopLevelSemanticTextStatsByTask(inferenceFieldsByIndexServiceAndTask, endpointStats);
+        addTopLevelStatsByTask(inferenceFieldsByIndexServiceAndTask, endpointStats);
     }

-    private static void addTopLevelSemanticTextStatsByTask(
+    private static void addTopLevelStatsByTask(
         Map<ServiceAndTaskType, Map<String, List<InferenceFieldMetadata>>> inferenceFieldsByIndexServiceAndTask,
         Map<String, ModelStats> endpointStats
     ) {
@@ -174,14 +174,20 @@ private static void addTopLevelSemanticTextStatsByTask(
                 new ServiceAndTaskType(Metadata.ALL, taskType).toString(),
                 key -> new ModelStats(Metadata.ALL, taskType)
             );
-            Map<String, List<InferenceFieldMetadata>> inferenceFieldsByIndex = inferenceFieldsByIndexServiceAndTask.entrySet()
-                .stream()
-                .filter(e -> e.getKey().taskType == taskType)
-                .flatMap(m -> m.getValue().entrySet().stream())
-                .collect(
-                    Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (l1, l2) -> Stream.concat(l1.stream(), l2.stream()).toList())
-                );
-            addSemanticTextStats(inferenceFieldsByIndex, allStatsForTaskType);
+            if (taskType.isCompatibleWithSemanticText()) {
+                Map<String, List<InferenceFieldMetadata>> inferenceFieldsByIndex = inferenceFieldsByIndexServiceAndTask.entrySet()
+                    .stream()
+                    .filter(e -> e.getKey().taskType == taskType)
+                    .flatMap(m -> m.getValue().entrySet().stream())
+                    .collect(
+                        Collectors.toMap(
+                            Map.Entry::getKey,
+                            Map.Entry::getValue,
+                            (l1, l2) -> Stream.concat(l1.stream(), l2.stream()).toList()
+                        )
+                    );
+                addSemanticTextStats(inferenceFieldsByIndex, allStatsForTaskType);
+            }
         }
     }

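Note on the gating added above: the diff only shows calls to `TaskType#isCompatibleWithSemanticText()`, not its implementation. As a rough, hypothetical sketch (assuming `semantic_text` fields are backed only by embedding endpoints; the enum constants below are illustrative, not the real `TaskType` definition), the predicate could look like:

// Hypothetical sketch only -- the real TaskType enum lives elsewhere in the codebase.
// Assumption: semantic_text fields can only be backed by embedding endpoints, so only
// the embedding task types report compatibility.
public enum TaskType {
    TEXT_EMBEDDING,
    SPARSE_EMBEDDING,
    RERANK,
    COMPLETION,
    CHAT_COMPLETION;

    public boolean isCompatibleWithSemanticText() {
        return this == TEXT_EMBEDDING || this == SPARSE_EMBEDDING;
    }
}

With a predicate along those lines, the top-level stats loop above skips counting semantic_text fields for task types that can never appear in a semantic_text mapping.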
@@ -196,20 +202,21 @@ private static void addSemanticTextStats(Map<String, List<InferenceFieldMetadata
     }

     /**
-     * Adds stats for default models. In particular, default models are considered models that are
-     * associated with default inference endpoints as per the {@code ModelRegistry}. The service name
-     * for default model stats is "_{service}_{modelId}". Each of those stats contains usage for all
-     * endpoints that use that model, including non-default endpoints.
+     * Adds stats for default models that are compatible with semantic_text.
+     * In particular, default models are considered models that are associated with default inference
+     * endpoints as per the {@code ModelRegistry}. The service name for default model stats is "_{service}_{modelId}".
+     * Each of those stats contains usage for all endpoints that use that model, including non-default endpoints.
      */
-    private void addStatsForDefaultModels(
+    private void addStatsForDefaultModelsCompatibleWithSemanticText(
         Map<ServiceAndTaskType, Map<String, List<InferenceFieldMetadata>>> inferenceFieldsByIndexServiceAndTask,
         List<ModelConfigurations> endpoints,
         Map<String, ModelStats> endpointStats
     ) {
         Map<String, String> endpointIdToModelId = endpoints.stream()
             .filter(endpoint -> endpoint.getServiceSettings().modelId() != null)
             .collect(Collectors.toMap(ModelConfigurations::getInferenceEntityId, e -> stripLinuxSuffix(e.getServiceSettings().modelId())));
-        Map<DefaultModelStatsKey, Long> defaultModelsToEndpointCount = createDefaultStatsKeysWithEndpointCounts(endpoints);
+        Map<DefaultModelStatsKey, Long> defaultModelsToEndpointCount =
+            createStatsKeysWithEndpointCountsForDefaultModelsCompatibleWithSemanticText(endpoints);
         for (Map.Entry<DefaultModelStatsKey, Long> defaultModelStatsKeyToEndpointCount : defaultModelsToEndpointCount.entrySet()) {
             DefaultModelStatsKey statKey = defaultModelStatsKeyToEndpointCount.getKey();
             Map<String, List<InferenceFieldMetadata>> fieldsByIndex = inferenceFieldsByIndexServiceAndTask.getOrDefault(
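To make the "_{service}_{modelId}" naming described in the Javadoc concrete, here is a minimal, illustrative sketch. Only the naming pattern comes from the Javadoc; the record shape and field names are assumptions, and the real `DefaultModelStatsKey` is defined elsewhere in this class:

// Illustrative only: a hypothetical key mirroring the "_{service}_{modelId}" pattern
// from the Javadoc above; not the actual DefaultModelStatsKey implementation.
record DefaultModelStatsKey(String service, String modelId) {
    @Override
    public String toString() {
        return "_" + service + "_" + modelId;
    }
}

// e.g. new DefaultModelStatsKey("my_service", "my_model").toString() yields "_my_service_my_model";
// per the Javadoc, usage from every endpoint backed by that default model, default or not,
// is aggregated under that single stats name.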
@@ -225,11 +232,14 @@ private void addStatsForDefaultModels(
         }
     }

-    private Map<DefaultModelStatsKey, Long> createDefaultStatsKeysWithEndpointCounts(List<ModelConfigurations> endpoints) {
+    private Map<DefaultModelStatsKey, Long> createStatsKeysWithEndpointCountsForDefaultModelsCompatibleWithSemanticText(
+        List<ModelConfigurations> endpoints
+    ) {
         // We consider models to be default if they are associated with a default inference endpoint.
         // Note that endpoints could have a null model id, in which case we don't consider them default as this
         // may only happen for external services.
         Set<String> modelIds = endpoints.stream()
+            .filter(endpoint -> endpoint.getTaskType().isCompatibleWithSemanticText())
             .filter(endpoint -> modelRegistry.containsDefaultConfigId(endpoint.getInferenceEntityId()))
             .filter(endpoint -> endpoint.getServiceSettings().modelId() != null)
             .map(endpoint -> stripLinuxSuffix(endpoint.getServiceSettings().modelId()))