1 change: 1 addition & 0 deletions CHANGELOGS.rst
@@ -4,6 +4,7 @@ Change Logs
0.7.0
+++++

* :pr:`147`: simplified log processing
* :pr:`146`: patch for IdeficsAttention, IdeficsEmbedding
* :pr:`145`: patch for _compute_dynamic_ntk_parameters (Phi3RotaryEmbedding)
* :pr:`144`: support for second inputs with different dimension,
1 change: 1 addition & 0 deletions _doc/api/helpers/index.rst
@@ -13,6 +13,7 @@ onnx_diagnostic.helpers
doc_helper
graph_helper
helper
log_helper
memory_peak
mini_onnx_builder
model_builder_helper
7 changes: 7 additions & 0 deletions _doc/api/helpers/log_helper.rst
@@ -0,0 +1,7 @@

onnx_diagnostic.helpers.log_helper
==================================

.. automodule:: onnx_diagnostic.helpers.log_helper
:members:
:no-undoc-members:
191 changes: 191 additions & 0 deletions _unittests/ut_helpers/test_log_helper.py
@@ -0,0 +1,191 @@
import io
import os
import textwrap
import unittest
import zipfile
import pandas
from onnx_diagnostic.ext_test_case import ExtTestCase, hide_stdout
from onnx_diagnostic.helpers.log_helper import (
CubeLogs,
CubeViewDef,
enumerate_csv_files,
open_dataframe,
)


class TestLogHelper(ExtTestCase):
@classmethod
def df1(cls):
return pandas.read_csv(
io.StringIO(
textwrap.dedent(
"""
date,version_python,version_transformers,model_name,model_exporter,time_load,time_latency,time_baseline,disc_ort,disc_ort2
2025/01/01,3.13.3,4.52.4,phi3,export,0.5,0.1,0.1,1e-5,1e-5
2025/01/02,3.13.3,4.52.4,phi3,export,0.6,0.11,0.1,1e-5,1e-5
2025/01/01,3.13.3,4.52.4,phi4,export,0.5,0.1,0.105,1e-5,1e-5
2025/01/01,3.12.3,4.52.4,phi4,onnx-dynamo,0.5,0.1,0.999,1e-5,1e-5
"""
)
)
)

@classmethod
def cube1(cls, verbose=0):
cube = CubeLogs(
cls.df1(),
recent=True,
formulas={"speedup": lambda df: df["time_baseline"] / df["time_baseline"]},
)
return cube.load(verbose=verbose)

@hide_stdout()
def test_cube_logs_load_df(self):
df = self.df1()
cube = CubeLogs(df)
text = str(cube)
self.assertIsInstance(text, str)
self.assertRaise(lambda: cube.load(verbose=1), AssertionError)
cube = CubeLogs(
self.df1(),
recent=True,
formulas={"speedup": lambda df: df["time_baseline"] / df["time_baseline"]},
)
cube.load(verbose=1)
text = str(cube)
self.assertIsInstance(text, str)
self.assertEqual((3, df.shape[1] + 1), cube.shape)
self.assertEqual(set(cube.columns), {*df.columns, "speedup"})

@hide_stdout()
def test_cube_logs_load_dfdf(self):
df = self.df1()
cube = CubeLogs([df, df], recent=True)
cube.load(verbose=1)
self.assertEqual((3, 10), cube.shape)

@hide_stdout()
def test_cube_logs_load_list(self):
cube = CubeLogs(
[
dict(
date="1/1/2001",
version_python="3.13",
model_exporter="A",
time_latency=5.6,
),
dict(
date="1/1/2001",
version_python="3.13",
model_exporter="B",
time_latency=5.7,
),
]
)
cube.load(verbose=1)
self.assertEqual((2, 4), cube.shape)

def test_cube_logs_view_repr(self):
v = CubeViewDef(["version.*", "model_name"], ["time_latency", "time_baseline"])
r = repr(v)
self.assertEqual(
"CubeViewDef(key_index=['version.*', 'model_name'], "
"values=['time_latency', 'time_baseline'])",
r,
)

@hide_stdout()
def test_cube_logs_view(self):
cube = self.cube1(verbose=1)
view = cube.view(
CubeViewDef(["version.*", "model_name"], ["time_latency", "time_baseline"])
)
self.assertEqual((3, 4), view.shape)
self.assertEqual(
[
("time_baseline", "export"),
("time_baseline", "onnx-dynamo"),
("time_latency", "export"),
("time_latency", "onnx-dynamo"),
],
list(view.columns),
)
self.assertEqual(
[("3.12.3", "phi4"), ("3.13.3", "phi3"), ("3.13.3", "phi4")], list(view.index)
)

view = cube.view(
CubeViewDef(
["version.*"], ["time_latency", "time_baseline"], order=["model_exporter"]
)
)
self.assertEqual((2, 6), view.shape)
self.assertEqual(
[
("time_baseline", "export", "phi3"),
("time_baseline", "export", "phi4"),
("time_baseline", "onnx-dynamo", "phi4"),
("time_latency", "export", "phi3"),
("time_latency", "export", "phi4"),
("time_latency", "onnx-dynamo", "phi4"),
],
list(view.columns),
)
self.assertEqual(["3.12.3", "3.13.3"], list(view.index))

def test_cube_logs_view_agg(self):
cube = self.cube1(verbose=0)
view = cube.view(
CubeViewDef(
["version.*", "model.*"],
["time_latency", "time_baseline"],
key_agg=["model_name"],
)
)
self.assertEqual((2, 2), view.shape)
self.assertEqual(["time_baseline", "time_latency"], list(view.columns))
self.assertEqual([("3.13.3", "export"), ("3.12.3", "onnx-dynamo")], list(view.index))

@hide_stdout()
def test_cube_logs_excel(self):
output = self.get_dump_file("test_cube_logs_excel.xlsx")
cube = self.cube1(verbose=0)
cube.to_excel(
output,
{
"example": CubeViewDef(
["version.*", "model_name"], ["time_latency", "time_baseline"]
),
"agg": CubeViewDef(
["version.*", "model.*"],
["time_latency", "time_baseline"],
key_agg=["model_name"],
),
},
verbose=1,
)
self.assertExists(output)

def test_enumerate_csv_files(self):
df = self.df1()
filename = self.get_dump_file("test_enumerate_csv_files.csv")
df.to_csv(filename, index=False)
zip_file = self.get_dump_file("test_enumerate_csv_files.zip")
with zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED) as zipf:
zipf.write(filename)

dirname = os.path.dirname(filename)
data = [os.path.join(dirname, "*.csv"), os.path.join(dirname, "*.zip")]
dfs = list(enumerate_csv_files(data, verbose=1))
self.assertNotEmpty(dfs)
for df in dfs:
open_dataframe(df)

cube = CubeLogs(data, recent=True)
cube.load(verbose=1)
self.assertEqual((3, 11), cube.shape)
self.assertIn("RAWFILENAME", cube.data.columns)


if __name__ == "__main__":
unittest.main(verbosity=2)
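For context, a minimal usage sketch of the new log_helper API, assembled only from the calls exercised in the test file above (CubeLogs, CubeViewDef, load, view, to_excel). The reduced dataframe, the column names and the output filename are illustrative, and the comments describe behaviour inferred from the assertions above rather than documented guarantees.

import io
import textwrap

import pandas
from onnx_diagnostic.helpers.log_helper import CubeLogs, CubeViewDef

# Raw benchmark logs: one row per (date, python version, model, exporter).
df = pandas.read_csv(
    io.StringIO(
        textwrap.dedent(
            """
            date,version_python,model_name,model_exporter,time_latency,time_baseline
            2025/01/01,3.13.3,phi3,export,0.1,0.1
            2025/01/01,3.12.3,phi4,onnx-dynamo,0.1,0.999
            """
        )
    )
)

# recent=True keeps only the most recent row per key, formulas adds derived metrics.
cube = CubeLogs(
    df,
    recent=True,
    formulas={"speedup": lambda df: df["time_baseline"] / df["time_latency"]},
)
cube.load(verbose=1)

# A view pivots the selected metrics over the columns matching the key patterns.
view = cube.view(
    CubeViewDef(["version.*", "model_name"], ["time_latency", "time_baseline"])
)
print(view)

# Several named views can be written to one Excel file; CubeLogs also accepts
# a list of csv/zip files or glob patterns instead of a dataframe.
cube.to_excel(
    "logs.xlsx",
    {"example": CubeViewDef(["version.*", "model_name"], ["time_latency", "time_baseline"])},
    verbose=1,
)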
17 changes: 12 additions & 5 deletions _unittests/ut_helpers/test_ort_session_tinyllm.py
@@ -73,16 +73,23 @@ def test_ort_value_more(self):
@ignore_warnings((UserWarning, DeprecationWarning, FutureWarning))
@hide_stdout()
def test_check_allruntimes_on_tiny_llm(self):
try:
from experimental_experiment.torch_interpreter import to_onnx
except ImportError:
to_onnx = None

data = get_tiny_llm()
model, inputs, ds = data["model"], data["inputs"], data["dynamic_shapes"]
expected = model(**copy.deepcopy(inputs))

with torch_export_patches(patch_transformers=True):
ep = torch.onnx.export(
model, (), kwargs=copy.deepcopy(inputs), dynamic_shapes=ds, dynamo=True
)
with torch_export_patches(patch_transformers=True, stop_if_static=1):
if to_onnx:
proto = to_onnx(model, (), kwargs=copy.deepcopy(inputs), dynamic_shapes=ds)
else:
proto = torch.onnx.export(
model, (), kwargs=copy.deepcopy(inputs), dynamic_shapes=ds, dynamo=True
).model_proto

proto = ep.model_proto
self.dump_onnx("test_check_allruntimes_on_tiny_llm.onnx", proto)
feeds = make_feeds(proto, inputs, use_numpy=True, copy=True)
sess = onnxruntime.InferenceSession(
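Both updated tests follow the same pattern: prefer experimental_experiment's to_onnx when it is installed, otherwise fall back to torch.onnx.export with dynamo=True and keep the resulting ModelProto; they also now pass stop_if_static=1 to torch_export_patches. A condensed sketch of that fallback follows; export_with_fallback is a hypothetical name used only for illustration, not a helper from the repository.

import copy

import torch

try:
    from experimental_experiment.torch_interpreter import to_onnx
except ImportError:
    # Optional dependency: degrade gracefully to the dynamo-based exporter.
    to_onnx = None


def export_with_fallback(model, inputs, dynamic_shapes):
    # Hypothetical helper condensing the branch used by both tests.
    if to_onnx is not None:
        return to_onnx(model, (), kwargs=copy.deepcopy(inputs), dynamic_shapes=dynamic_shapes)
    return torch.onnx.export(
        model, (), kwargs=copy.deepcopy(inputs), dynamic_shapes=dynamic_shapes, dynamo=True
    ).model_proto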
34 changes: 22 additions & 12 deletions _unittests/ut_torch_models/test_tiny_llms_onnx.py
@@ -97,29 +97,39 @@ def test_bypass_onnx_export_tiny_llm_official_nopositionids(self):
@ignore_warnings((UserWarning, DeprecationWarning, FutureWarning))
@hide_stdout()
def test_bypass_onnx_export_tiny_llm_official_full(self):
try:
from experimental_experiment.torch_interpreter import to_onnx
except ImportError:
to_onnx = None

data = get_tiny_llm()
model, inputs, ds = data["model"], data["inputs"], data["dynamic_shapes"]
self.assertEqual(
{"attention_mask", "past_key_values", "input_ids", "position_ids"}, set(inputs)
)
with torch_export_patches(patch_transformers=True, verbose=1) as modificator:
with torch_export_patches(
patch_transformers=True, verbose=1, stop_if_static=1
) as modificator:
new_inputs = modificator(copy.deepcopy(inputs))
ep = torch.onnx.export(
model,
(),
kwargs=new_inputs,
dynamic_shapes=ds,
dynamo=True,
optimize=True,
report=True,
verify=False,
)
if to_onnx:
proto = to_onnx(model, (), kwargs=new_inputs, dynamic_shapes=ds)
else:
proto = torch.onnx.export(
model,
(),
kwargs=new_inputs,
dynamic_shapes=ds,
dynamo=True,
optimize=True,
report=True,
verify=False,
).model_proto
# There are some discrepancies with torch==2.6
if not has_torch("2.7"):
raise unittest.SkipTest("discrepancies observed with torch<2.7")
self.assert_onnx_disc(
inspect.currentframe().f_code.co_name,
ep.model_proto,
proto,
model,
inputs,
verbose=1,