Skip to content

Commit 450a64f

Browse files
Remove commented-out logger.debug blocks per PR review request
1 parent ed2d015 commit 450a64f

File tree

3 files changed

+0
-177
lines changed

3 files changed

+0
-177
lines changed

src/api/providers/bedrock.ts

Lines changed: 0 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,6 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
187187
const usePromptCache = Boolean(this.options.awsUsePromptCache && this.supportsAwsPromptCache(modelConfig))
188188

189189
// Generate a conversation ID based on the first few messages to maintain cache consistency
190-
// This is a simple approach - in a real application, you might want to use a more robust ID system
191190
const conversationId =
192191
messages.length > 0
193192
? `conv_${messages[0].role}_${
@@ -232,16 +231,6 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
232231
10 * 60 * 1000,
233232
)
234233

235-
// Log the payload for debugging custom ARN issues
236-
if (this.options.awsCustomArn) {
237-
// logger.debug("Using custom ARN for Bedrock request", {
238-
// ctx: "bedrock",
239-
// customArn: this.options.awsCustomArn,
240-
// clientRegion: this.client.config.region,
241-
// payload: JSON.stringify(payload, null, 2),
242-
// })
243-
}
244-
245234
const command = new ConverseStreamCommand(payload)
246235
const response = await this.client.send(command, {
247236
abortSignal: controller.signal,
@@ -274,15 +263,6 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
274263
const cacheReadTokens = usage.cacheReadInputTokens || usage.cacheReadInputTokenCount || 0
275264
const cacheWriteTokens = usage.cacheWriteInputTokens || usage.cacheWriteInputTokenCount || 0
276265

277-
// logger.debug("Bedrock usage amounts before yielding", {
278-
// ctx: "bedrock",
279-
// inputTokens: usage.inputTokens || 0,
280-
// outputTokens: usage.outputTokens || 0,
281-
// cacheReadTokens,
282-
// cacheWriteTokens,
283-
// modelId: modelId,
284-
// })
285-
286266
// Always include all available token information
287267
yield {
288268
type: "usage",
@@ -317,15 +297,6 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
317297
const cacheWriteTokens =
318298
routerUsage.cacheWriteTokens || routerUsage.cacheWriteInputTokenCount || 0
319299

320-
// logger.debug("Bedrock prompt router usage amounts before yielding", {
321-
// ctx: "bedrock",
322-
// inputTokens: routerUsage.inputTokens || 0,
323-
// outputTokens: routerUsage.outputTokens || 0,
324-
// cacheReadTokens,
325-
// cacheWriteTokens,
326-
// invokedModelId: streamEvent.trace.promptRouter.invokedModelId,
327-
// })
328-
329300
yield {
330301
type: "usage",
331302
inputTokens: routerUsage.inputTokens || 0,
@@ -433,11 +404,7 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
433404
response.output.message.content[0].text.trim().length > 0
434405
) {
435406
try {
436-
//const outputStr = new TextDecoder().decode(response.output.message.content[0].text)
437-
//const output = JSON.parse(outputStr)
438-
//if (response.output.message.content[0].text) {
439407
return response.output.message.content[0].text
440-
//}
441408
} catch (parseError) {
442409
logger.error("Failed to parse Bedrock response", {
443410
ctx: "bedrock",

src/api/transform/cache-strategy/base-strategy.ts

Lines changed: 0 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -148,24 +148,10 @@ export abstract class CacheStrategy {
148148
*/
149149
protected applyCachePoints(messages: Message[], placements: CachePointPlacement[]): Message[] {
150150
const result: Message[] = []
151-
152-
// logger.debug("Applying cache points to messages", {
153-
// ctx: "cache-strategy",
154-
// messageCount: messages.length,
155-
// placementCount: placements.length,
156-
// placements: placements.map((p) => ({ index: p.index, tokensCovered: p.tokensCovered })),
157-
// })
158-
159151
for (let i = 0; i < messages.length; i++) {
160152
const placement = placements.find((p) => p.index === i)
161153

162154
if (placement) {
163-
// logger.debug("Adding cache point to message", {
164-
// ctx: "cache-strategy",
165-
// messageIndex: i,
166-
// messageRole: messages[i].role,
167-
// tokensCovered: placement.tokensCovered,
168-
// })
169155
messages[i].content?.push(this.createCachePoint())
170156
}
171157
result.push(messages[i])

src/api/transform/cache-strategy/multi-point-strategy.ts

Lines changed: 0 additions & 130 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,6 @@ export class MultiPointStrategy extends CacheStrategy {
1414
public determineOptimalCachePoints(): CacheResult {
1515
// If prompt caching is disabled or no messages, return without cache points
1616
if (!this.config.usePromptCache || this.config.messages.length === 0) {
17-
// logger.debug("Cache points not used: prompt caching disabled or no messages", {
18-
// ctx: "cache-strategy",
19-
// usePromptCache: this.config.usePromptCache,
20-
// messageCount: this.config.messages.length,
21-
// })
2217
return this.formatWithoutCachePoints()
2318
}
2419

@@ -27,71 +22,26 @@ export class MultiPointStrategy extends CacheStrategy {
2722
const minTokensPerPoint = this.config.modelInfo.minTokensPerCachePoint
2823
let remainingCachePoints: number = this.config.modelInfo.maxCachePoints
2924

30-
// logger.debug("Starting cache point determination", {
31-
// ctx: "cache-strategy",
32-
// supportsSystemCache,
33-
// supportsMessageCache,
34-
// minTokensPerPoint,
35-
// maxCachePoints: this.config.modelInfo.maxCachePoints,
36-
// remainingCachePoints,
37-
// messageCount: this.config.messages.length,
38-
// })
39-
4025
// First, determine if we'll use a system cache point
4126
const useSystemCache =
4227
supportsSystemCache && this.config.systemPrompt && this.meetsMinTokenThreshold(this.systemTokenCount)
4328

44-
// logger.debug("System cache point evaluation", {
45-
// ctx: "cache-strategy",
46-
// supportsSystemCache,
47-
// hasSystemPrompt: !!this.config.systemPrompt,
48-
// systemTokenCount: this.systemTokenCount,
49-
// minTokensRequired: this.config.modelInfo.minTokensPerCachePoint,
50-
// meetsThreshold: this.meetsMinTokenThreshold(this.systemTokenCount),
51-
// useSystemCache,
52-
// })
53-
5429
// Handle system blocks
5530
let systemBlocks: SystemContentBlock[] = []
5631
if (this.config.systemPrompt) {
5732
systemBlocks = [{ text: this.config.systemPrompt } as unknown as SystemContentBlock]
5833
if (useSystemCache) {
59-
// logger.debug("Adding cache point after system prompt", {
60-
// ctx: "cache-strategy",
61-
// systemTokenCount: this.systemTokenCount,
62-
// })
6334
systemBlocks.push(this.createCachePoint() as unknown as SystemContentBlock)
6435
remainingCachePoints--
6536
}
6637
}
6738

6839
// If message caching isn't supported, return with just system caching
6940
if (!supportsMessageCache) {
70-
// logger.debug("Message caching not supported, using system caching only", {
71-
// ctx: "cache-strategy",
72-
// supportsMessageCache,
73-
// })
7441
return this.formatResult(systemBlocks, this.messagesToContentBlocks(this.config.messages))
7542
}
7643

77-
// Determine optimal cache point placements for messages
78-
// logger.debug("Determining message cache points", {
79-
// ctx: "cache-strategy",
80-
// minTokensPerPoint,
81-
// remainingCachePoints,
82-
// messageCount: this.config.messages.length,
83-
// hasPreviousPlacements: !!this.config.previousCachePointPlacements,
84-
// previousPlacementsCount: this.config.previousCachePointPlacements?.length || 0,
85-
// })
86-
8744
const placements = this.determineMessageCachePoints(minTokensPerPoint, remainingCachePoints)
88-
89-
// logger.debug("Cache point placements determined", {
90-
// ctx: "cache-strategy",
91-
// placementsCount: placements.length,
92-
// placements: placements.map((p) => ({ index: p.index, tokensCovered: p.tokensCovered })),
93-
// })
94-
9545
const messages = this.messagesToContentBlocks(this.config.messages)
9646
let cacheResult = this.formatResult(systemBlocks, this.applyCachePoints(messages, placements))
9747

@@ -115,62 +65,29 @@ export class MultiPointStrategy extends CacheStrategy {
11565
remainingCachePoints: number,
11666
): CachePointPlacement[] {
11767
if (this.config.messages.length <= 1) {
118-
// logger.debug("Not enough messages for cache points", {
119-
// ctx: "cache-strategy",
120-
// messageCount: this.config.messages.length,
121-
// })
12268
return []
12369
}
12470

12571
const placements: CachePointPlacement[] = []
12672
const totalMessages = this.config.messages.length
12773
const previousPlacements = this.config.previousCachePointPlacements || []
12874

129-
// logger.debug("Starting message cache point determination", {
130-
// ctx: "cache-strategy",
131-
// totalMessages,
132-
// previousPlacementsCount: previousPlacements.length,
133-
// remainingCachePoints,
134-
// })
135-
13675
// Special case: If previousPlacements is empty, place initial cache points
13776
if (previousPlacements.length === 0) {
138-
// logger.debug("No previous placements, determining initial cache points", {
139-
// ctx: "cache-strategy",
140-
// })
141-
14277
let currentIndex = 0
14378

14479
while (currentIndex < totalMessages && remainingCachePoints > 0) {
145-
// logger.debug("Finding optimal placement for range", {
146-
// ctx: "cache-strategy",
147-
// startIndex: currentIndex,
148-
// endIndex: totalMessages - 1,
149-
// minTokensPerPoint,
150-
// })
151-
15280
const newPlacement = this.findOptimalPlacementForRange(
15381
currentIndex,
15482
totalMessages - 1,
15583
minTokensPerPoint,
15684
)
15785

15886
if (newPlacement) {
159-
// logger.debug("Found optimal placement", {
160-
// ctx: "cache-strategy",
161-
// placementIndex: newPlacement.index,
162-
// tokensCovered: newPlacement.tokensCovered,
163-
// })
164-
16587
placements.push(newPlacement)
16688
currentIndex = newPlacement.index + 1
16789
remainingCachePoints--
16890
} else {
169-
// logger.debug("No suitable placement found in range", {
170-
// ctx: "cache-strategy",
171-
// startIndex: currentIndex,
172-
// endIndex: totalMessages - 1,
173-
// })
17491
break
17592
}
17693
}
@@ -244,16 +161,6 @@ export class MultiPointStrategy extends CacheStrategy {
244161
const requiredPercentageIncrease = 1.2 // 20% increase required
245162
const requiredTokenThreshold = smallestGap * requiredPercentageIncrease
246163

247-
// logger.debug("Cache point decision", {
248-
// ctx: "cache-strategy",
249-
// newMessagesTokens,
250-
// smallestGap,
251-
// requiredTokenThreshold,
252-
// shouldCombine: newMessagesTokens >= requiredTokenThreshold,
253-
// lastPreviousIndex,
254-
// totalMessages,
255-
// })
256-
257164
if (newMessagesTokens >= requiredTokenThreshold) {
258165
// It's beneficial to combine cache points since new messages have significantly more tokens
259166
logger.info("Combining cache points is beneficial", {
@@ -348,11 +255,6 @@ export class MultiPointStrategy extends CacheStrategy {
348255
minTokensPerPoint: number,
349256
): CachePointPlacement | null {
350257
if (startIndex >= endIndex) {
351-
// logger.debug("Invalid range for cache point placement", {
352-
// ctx: "cache-strategy",
353-
// startIndex,
354-
// endIndex,
355-
// })
356258
return null
357259
}
358260

@@ -365,14 +267,6 @@ export class MultiPointStrategy extends CacheStrategy {
365267
}
366268
}
367269

368-
// logger.debug("Finding last user message in range", {
369-
// ctx: "cache-strategy",
370-
// startIndex,
371-
// endIndex,
372-
// lastUserMessageIndex,
373-
// foundUserMessage: lastUserMessageIndex >= 0,
374-
// })
375-
376270
if (lastUserMessageIndex >= 0) {
377271
// Calculate the total tokens covered from the previous cache point (or start of conversation)
378272
// to this cache point. This ensures tokensCovered represents the full span of tokens
@@ -395,34 +289,10 @@ export class MultiPointStrategy extends CacheStrategy {
395289
.slice(tokenStartIndex, lastUserMessageIndex + 1)
396290
.reduce((acc, curr) => acc + this.estimateTokenCount(curr), 0)
397291

398-
// logger.debug("Evaluating potential cache point", {
399-
// ctx: "cache-strategy",
400-
// messageIndex: lastUserMessageIndex,
401-
// previousCachePointIndex,
402-
// tokenStartIndex,
403-
// totalTokensCovered,
404-
// minTokensPerPoint,
405-
// meetsThreshold: totalTokensCovered >= minTokensPerPoint,
406-
// })
407-
408292
// Guard clause: ensure we have enough tokens to justify a cache point
409293
if (totalTokensCovered < minTokensPerPoint) {
410-
// logger.debug("Not enough tokens for cache point", {
411-
// ctx: "cache-strategy",
412-
// totalTokensCovered,
413-
// minTokensPerPoint,
414-
// messageIndex: lastUserMessageIndex,
415-
// })
416294
return null
417295
}
418-
419-
// logger.debug("Creating cache point placement", {
420-
// ctx: "cache-strategy",
421-
// index: lastUserMessageIndex,
422-
// tokensCovered: totalTokensCovered,
423-
// messageRole: this.config.messages[lastUserMessageIndex].role,
424-
// })
425-
426296
return {
427297
index: lastUserMessageIndex,
428298
type: "message",

0 commit comments

Comments (0)