Commit d51ea08
fixes
1 parent c22e406

2 files changed, +8 -8 lines

optillm/autothink/processor.py

Lines changed: 2 additions & 3 deletions
@@ -345,9 +345,8 @@ def process(self, messages: List[Dict[str, str]]) -> str:
             for hook, _ in self.steering_hooks:
                 # Update token history with the new token
                 hook.update_token_history([next_token])
-                # Check for matches occasionally during generation
-                if random.random() < 0.1:  # 10% chance per token
-                    hook.try_match()
+                # Check for matches on EVERY token
+                hook.try_match()
 
             tokens = torch.tensor([[next_token]]).to(tokens.device)
 
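With the random 10% gate removed, try_match() now runs on every generated token. This stays cheap because the hook short-circuits once a pattern has been locked in, so the matching work only happens until the first hit. A minimal sketch of that interaction, assuming a simplified hook class (HookSketch and its fields are illustrative stand-ins, not the real SteeringHook):

# Illustrative sketch only: a stripped-down hook showing why per-token
# try_match() calls stay cheap once a pattern has been selected.
class HookSketch:
    def __init__(self):
        self.active_pattern = None   # locked in at most once per generation
        self.token_history = []      # tokens seen so far

    def update_token_history(self, tokens):
        self.token_history.extend(tokens)

    def try_match(self):
        # Early exit: once a pattern is active, every later call is a no-op.
        if self.active_pattern:
            return False
        # ... matching against steering-vector contexts would happen here ...
        return False

# Per-token loop shaped like the processor change above (next_token values
# stand in for whatever the model's sampling step produces):
hooks = [HookSketch()]
for next_token in [101, 102, 103]:
    for hook in hooks:
        hook.update_token_history([next_token])
        hook.try_match()  # checked on every token now, no random gate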

optillm/autothink/steering.py

Lines changed: 6 additions & 5 deletions
@@ -545,21 +545,22 @@ def try_match(self):
         Try to match the current context with a steering vector.
         Only allows one pattern to be selected for the entire generation.
         """
-        # If we already have an active pattern for this generation, don't try to match again
-        if self.generation_started and self.active_pattern:
+        # If we already have an active pattern, don't try to match again
+        if self.active_pattern:
             return False
 
-        # Only attempt pattern matching at the beginning of generation
-        self.generation_started = True
-
         # Use token-based matching or text-based matching as appropriate
+        match_result = False
         if self.tokenizer is not None and hasattr(self.manager, 'tokenized_contexts') and self.manager.tokenized_contexts:
             # Token-based matching (similar to guided mode)
             match_result = self._try_token_match()
         else:
             # Text-based matching as fallback
             match_result = self._try_text_match()
 
+        # Set generation started flag AFTER trying to match
+        self.generation_started = True
+
         # If a match is found, set this as the permanent pattern for this generation
         if match_result and self.current_vector:
             new_pattern = self.current_vector.get("reasoning_pattern", "unknown")
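Condensed, the reordered control flow of try_match() after this commit looks roughly like the sketch below. The attribute and helper names follow the diff; the body is abbreviated and omits the pattern bookkeeping that continues after the hunk:

def try_match(self):
    # One pattern per generation: bail out as soon as a pattern is locked in.
    if self.active_pattern:
        return False

    # Attempt token-based matching when tokenized contexts are available,
    # otherwise fall back to text-based matching.
    match_result = False
    if self.tokenizer is not None and getattr(self.manager, "tokenized_contexts", None):
        match_result = self._try_token_match()
    else:
        match_result = self._try_text_match()

    # The generation-started flag is now set only AFTER a match attempt,
    # and it no longer gates the guard at the top of the method.
    self.generation_started = True

    # ... on success, the matched vector is then recorded as the permanent
    # pattern for this generation (see the context lines above) ...
    return match_result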
