Skip to content

Commit 7239bfe

Browse files
authored
Update babyllama generation
Differential Revision: D83066067. Pull Request resolved: #14519.
1 parent b991271 commit 7239bfe

File tree

1 file changed

+5
-3
lines changed

1 file changed

+5
-3
lines changed

examples/cadence/models/babyllama.py

Lines changed: 5 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -14,8 +14,10 @@
 
 from executorch.backends.cadence.aot.export_example import export_and_run_model
 
-from executorch.examples.models.llama.llama_transformer import ModelArgs, Transformer
-
+from executorch.examples.models.llama.llama_transformer import (
+    construct_transformer,
+    ModelArgs,
+)
 
 FORMAT = "[%(levelname)s %(asctime)s %(filename)s:%(lineno)s] %(message)s"
 logging.basicConfig(level=logging.INFO, format=FORMAT)
@@ -32,7 +34,7 @@ def main() -> None:
     )
     seq = 64
     b = 1
-    model = Transformer(args)
+    model = construct_transformer(args)
     example_inputs = (torch.randint(0, 10, [b, seq], dtype=torch.int64),)
 
     export_and_run_model(model, example_inputs)

0 commit comments

Comments
 (0)