@@ -53,7 +53,7 @@ export class LangChainProvider extends AIProvider {
     const response: AIMessage = await this._llm.invoke(langchainMessages);
 
     // Generate metrics early (assumes success by default)
-    const metrics = LangChainProvider.createAIMetrics(response);
+    const metrics = LangChainProvider.getAIMetricsFromResponse(response);
 
     // Extract text content from the response
     let content: string = '';
@@ -170,25 +170,27 @@ export class LangChainProvider extends AIProvider {
   }
 
   /**
-   * Create AI metrics information from a LangChain provider response.
+   * Get AI metrics from a LangChain provider response.
    * This method extracts token usage information and success status from LangChain responses
    * and returns a LaunchDarkly AIMetrics object.
    *
-   * @param langChainResponse The response from the LangChain model
+   * @param response The response from the LangChain model
+   * @returns LDAIMetrics with success status and token usage
+   *
    * @example
    * ```typescript
    * // Use with tracker.trackMetricsOf for automatic tracking
    * const response = await tracker.trackMetricsOf(
-   *   (result: AIMessage) => LangChainProvider.createAIMetrics(result),
+   *   LangChainProvider.getAIMetricsFromResponse,
    *   () => llm.invoke(messages)
    * );
    * ```
    */
-  static createAIMetrics(langChainResponse: AIMessage): LDAIMetrics {
+  static getAIMetricsFromResponse(response: AIMessage): LDAIMetrics {
     // Extract token usage if available
     let usage: LDTokenUsage | undefined;
-    if (langChainResponse?.response_metadata?.tokenUsage) {
-      const { tokenUsage } = langChainResponse.response_metadata;
+    if (response?.response_metadata?.tokenUsage) {
+      const { tokenUsage } = response.response_metadata;
       usage = {
         total: tokenUsage.totalTokens || 0,
         input: tokenUsage.promptTokens || 0,
@@ -203,6 +205,19 @@ export class LangChainProvider extends AIProvider {
     };
   }
 
+  /**
+   * Create AI metrics information from a LangChain provider response.
+   * This method extracts token usage information and success status from LangChain responses
+   * and returns a LaunchDarkly AIMetrics object.
+   *
+   * @deprecated Use `getAIMetricsFromResponse()` instead.
+   * @param langChainResponse The response from the LangChain model
+   * @returns LDAIMetrics with success status and token usage
+   */
+  static createAIMetrics(langChainResponse: AIMessage): LDAIMetrics {
+    return LangChainProvider.getAIMetricsFromResponse(langChainResponse);
+  }
+
   /**
    * Convert LaunchDarkly messages to LangChain messages.
    * This helper method enables developers to work directly with LangChain message types
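
For callers outside the provider, the renamed helper pairs with `tracker.trackMetricsOf` exactly as the JSDoc example above shows. Below is a minimal, self-contained sketch of that call shape; the `@launchdarkly/server-sdk-ai-langchain` import path, the `MetricsTracker` interface, and the model name are assumptions for illustration, not part of this diff.

```typescript
import { ChatOpenAI } from '@langchain/openai';
import { HumanMessage } from '@langchain/core/messages';
// Assumed import path; use wherever LangChainProvider is exported from in your project.
import { LangChainProvider } from '@launchdarkly/server-sdk-ai-langchain';

// Structural stand-in for the LaunchDarkly AI config tracker (assumed shape,
// modeled on the trackMetricsOf call shown in the JSDoc above).
interface MetricsTracker {
  trackMetricsOf<T>(extract: (result: T) => unknown, fn: () => Promise<T>): Promise<T>;
}

async function invokeWithMetrics(tracker: MetricsTracker, prompt: string) {
  const llm = new ChatOpenAI({ model: 'gpt-4o-mini' }); // any LangChain chat model works here

  // trackMetricsOf runs the generation and records what
  // getAIMetricsFromResponse extracts: success status plus token usage.
  const response = await tracker.trackMetricsOf(
    LangChainProvider.getAIMetricsFromResponse,
    () => llm.invoke([new HumanMessage(prompt)]),
  );

  return response.content;
}
```

Because the metrics extractor is now a plain static method that takes the response directly, it can be passed by reference instead of being wrapped in an arrow function, which is the motivation for the rename shown in this diff.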