@@ -51,7 +51,7 @@ export class LangChainProvider extends AIProvider {
5151 const response : AIMessage = await this . _llm . invoke ( langchainMessages ) ;
5252
5353 // Generate metrics early (assumes success by default)
54- const metrics = LangChainProvider . createAIMetrics ( response ) ;
54+ const metrics = LangChainProvider . getAIMetricsFromResponse ( response ) ;
5555
5656 // Extract text content from the response
5757 let content : string = '' ;
@@ -106,25 +106,27 @@ export class LangChainProvider extends AIProvider {
106106 }
107107
108108 /**
109- * Create AI metrics information from a LangChain provider response.
109+ * Get AI metrics from a LangChain provider response.
110110 * This method extracts token usage information and success status from LangChain responses
111111 * and returns a LaunchDarkly AIMetrics object.
112112 *
113- * @param langChainResponse The response from the LangChain model
113+ * @param response The response from the LangChain model
114+ * @returns LDAIMetrics with success status and token usage
115+ *
114116 * @example
115117 * ```typescript
116118 * // Use with tracker.trackMetricsOf for automatic tracking
117119 * const response = await tracker.trackMetricsOf(
118- * (result: AIMessage) => LangChainProvider.createAIMetrics(result) ,
120+ * LangChainProvider.getAIMetricsFromResponse ,
119121 * () => llm.invoke(messages)
120122 * );
121123 * ```
122124 */
123- static createAIMetrics ( langChainResponse : AIMessage ) : LDAIMetrics {
125+ static getAIMetricsFromResponse ( response : AIMessage ) : LDAIMetrics {
124126 // Extract token usage if available
125127 let usage : LDTokenUsage | undefined ;
126- if ( langChainResponse ?. response_metadata ?. tokenUsage ) {
127- const { tokenUsage } = langChainResponse . response_metadata ;
128+ if ( response ?. response_metadata ?. tokenUsage ) {
129+ const { tokenUsage } = response . response_metadata ;
128130 usage = {
129131 total : tokenUsage . totalTokens || 0 ,
130132 input : tokenUsage . promptTokens || 0 ,
@@ -139,6 +141,19 @@ export class LangChainProvider extends AIProvider {
139141 } ;
140142 }
141143
144+ /**
145+ * Create AI metrics information from a LangChain provider response.
146+ * This method extracts token usage information and success status from LangChain responses
147+ * and returns a LaunchDarkly AIMetrics object.
148+ *
149+ * @deprecated Use `getAIMetricsFromResponse()` instead.
150+ * @param langChainResponse The response from the LangChain model
151+ * @returns LDAIMetrics with success status and token usage
152+ */
153+ static createAIMetrics ( langChainResponse : AIMessage ) : LDAIMetrics {
154+ return LangChainProvider . getAIMetricsFromResponse ( langChainResponse ) ;
155+ }
156+
142157 /**
143158 * Convert LaunchDarkly messages to LangChain messages.
144159 * This helper method enables developers to work directly with LangChain message types