--[[
The Tiny Lua Compiler Performance Test Suite
+
+ Lua 5.1 benchmark results (over 5000 iterations):
+ Tokenizer: 175.9758 seconds (51.36%) - 28.41 iterations/second
+ Parser: 43.9889 seconds (12.84%) - 113.66 iterations/second
+ CodeGenerator: 48.5441 seconds (14.17%) - 103.00 iterations/second
+ Compiler: 74.1044 seconds (21.63%) - 67.47 iterations/second
+ Total: 342.6132 seconds
+
+ LuaJIT benchmark results (over 5000 iterations):
+ Tokenizer: 20.8723 seconds (9.91%) - 239.55 iterations/second
+ Parser: 11.6405 seconds (5.53%) - 429.54 iterations/second
+ CodeGenerator: 158.4915 seconds (75.24%) - 31.55 iterations/second
+ Compiler: 19.6458 seconds (9.33%) - 254.51 iterations/second
+ Total: 210.6501 seconds
--]]

--* Imports *--
-local tlc = require("the-tiny-lua-compiler")
+local tlc = require("./../the-tiny-lua-compiler")

-- Constants --
-local ITERATIONS = 100
-local TLC_CODE = io.open("the-tiny-lua-compiler.lua"):read("*a")
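+-- Number of times each compiler stage is run per benchmark.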
+local ITERATIONS = 500
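+-- The benchmark input is the compiler's own source file.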
+local TLC_CODE = assert(io.open("./the-tiny-lua-compiler.lua", "r")):read("*a")

--* Functions *--
-local function compile(code)
-  local tokens = tlc.Tokenizer.new(code):tokenize()
-  local ast = tlc.Parser.new(tokens):parse()
-  local proto = tlc.CodeGenerator.new(ast):generate()
-  local bytecode = tlc.Compiler.new(proto):compile()
-  return bytecode
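+-- Times ITERATIONS runs of the tokenizer over the given source string.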
+local function benchmark_tokenizer(code)
+  local start = os.clock()
+  for _ = 1, ITERATIONS do
+    tlc.Tokenizer.new(code):tokenize()
+  end
+  return os.clock() - start
end

-local function benchmark()
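+-- Times ITERATIONS runs of the parser over a pre-tokenized token stream.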
+local function benchmark_parser(tokens)
+  local start = os.clock()
+  for _ = 1, ITERATIONS do
+    tlc.Parser.new(tokens):parse()
+  end
+  return os.clock() - start
+end
+
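+-- Times ITERATIONS runs of the code generator over a pre-built AST.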
+local function benchmark_codegenerator(ast)
  local start = os.clock()
  for _ = 1, ITERATIONS do
-    compile(TLC_CODE)
+    tlc.CodeGenerator.new(ast):generate()
  end
-  local elapsed = os.clock() - start
+  return os.clock() - start
+end
+
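+-- Times ITERATIONS runs of the compiler stage, which emits the final bytecode.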
+local function benchmark_compiler(proto)
+  local start = os.clock()
+  for _ = 1, ITERATIONS do
+    tlc.Compiler.new(proto):compile()
+  end
+  return os.clock() - start
+end
+
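+-- Runs every stage benchmark and prints a timing summary.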
+local function benchmark()
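+  -- Run the full pipeline once so each stage can be timed in isolation.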
+  local tokens = tlc.Tokenizer.new(TLC_CODE):tokenize()
+  local ast = tlc.Parser.new(tokens):parse()
+  local proto = tlc.CodeGenerator.new(ast):generate()
+
+  local tokenizer_elapsed = benchmark_tokenizer(TLC_CODE)
+  local parser_elapsed = benchmark_parser(tokens)
+  local codegenerator_elapsed = benchmark_codegenerator(ast)
+  local compiler_elapsed = benchmark_compiler(proto)
+
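+  -- Derive each stage's share of the total time and its throughput.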
+  local total_elapsed = tokenizer_elapsed + parser_elapsed + codegenerator_elapsed + compiler_elapsed
+  local tokenizer_elapsed_perc = (tokenizer_elapsed / total_elapsed) * 100
+  local parser_elapsed_perc = (parser_elapsed / total_elapsed) * 100
+  local codegenerator_elapsed_perc = (codegenerator_elapsed / total_elapsed) * 100
+  local compiler_elapsed_perc = (compiler_elapsed / total_elapsed) * 100
+  local tokenizer_iters_per_sec = ITERATIONS / tokenizer_elapsed
+  local parser_iters_per_sec = ITERATIONS / parser_elapsed
+  local codegenerator_iters_per_sec = ITERATIONS / codegenerator_elapsed
+  local compiler_iters_per_sec = ITERATIONS / compiler_elapsed

-  print(string.format("Compiled %d times in %.2f seconds", ITERATIONS, elapsed))
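+  -- Report results in the same format as the header comment above.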
+  print(string.format("Benchmark Results (over %d iterations):", ITERATIONS))
+  print(string.format("Tokenizer: %.4f seconds (%.2f%%) - %.2f iterations/second", tokenizer_elapsed,
+    tokenizer_elapsed_perc, tokenizer_iters_per_sec))
+  print(string.format("Parser: %.4f seconds (%.2f%%) - %.2f iterations/second", parser_elapsed,
+    parser_elapsed_perc, parser_iters_per_sec))
+  print(string.format("CodeGenerator: %.4f seconds (%.2f%%) - %.2f iterations/second", codegenerator_elapsed,
+    codegenerator_elapsed_perc, codegenerator_iters_per_sec))
+  print(string.format("Compiler: %.4f seconds (%.2f%%) - %.2f iterations/second", compiler_elapsed,
+    compiler_elapsed_perc, compiler_iters_per_sec))
+  print(string.format("Total: %.4f seconds", total_elapsed))
end

--* Main *--