Skip to content

Commit 57634af

Browse files
authored
Fix heavy test (#260)
1 parent cc61f34 commit 57634af

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

tests/test_vision_models.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -210,7 +210,7 @@ def generate_text(prompt):
210210
# Generation 1 - model creates a long story
211211
prompt = "<s>[INST]Tell me a 500 word story about the bravest soul in the middle ages, and their weapon of choice[/INST]"
212212
generated_text, reporter = generate_text(prompt)
213-
assert len(reporter.events) == 2 # single batch - Begin, Finished
213+
assert len(reporter.events) == 3 # begin, update, finish
214214
begin_event = reporter.events[0]
215215
assert begin_event["type"] == "begin"
216216
assert begin_event["cached_tokens"] == 0
@@ -222,7 +222,7 @@ def generate_text(prompt):
222222
# Without caching, prompts > 512 tokens cause multi-batch processing. Ensure prompt meets that condition
223223
assert num_tokens > 512
224224
generated_text, reporter = generate_text(prompt)
225-
assert len(reporter.events) == 2 # single batch - Begin, Finished
225+
assert len(reporter.events) == 3 # begin, update, finish
226226
begin_event = reporter.events[0]
227227
assert begin_event["type"] == "begin"
228228
assert begin_event["cached_tokens"] > 0 # Cache should be used

0 commit comments

Comments (0)