1 change: 1 addition & 0 deletions CHANGELOGS.rst
@@ -4,6 +4,7 @@ Change Logs
0.7.14
++++++

* :pr:`250`: add variables to track sequence nodes
* :pr:`249`: patches _maybe_broadcast to support a corner case

0.7.13
2 changes: 1 addition & 1 deletion _unittests/ut_helpers/test_log_helper.py
@@ -268,7 +268,7 @@ def test_cube_logs_performance_cube_time(self):
cube = CubeLogsPerformance(dfs, keep_last_date=True)
cube.load()
ct = cube.clone()
self.assertEqual((52, 111), ct.shape)
self.assertEqual((52, 116), ct.shape)

def test_duplicate(self):
df = pandas.DataFrame(
52 changes: 39 additions & 13 deletions onnx_diagnostic/helpers/log_helper.py
@@ -1167,7 +1167,7 @@ def to_excel(
df.to_excel(
writer,
sheet_name=name,
freeze_panes=(df.columns.nlevels + df.index.nlevels, df.index.nlevels),
freeze_panes=(df.columns.nlevels + 1, df.index.nlevels),
)
f_highlights[name] = tview.f_highlight
if tview.plots:
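A note on the `freeze_panes` change above: with MultiIndex columns, `DataFrame.to_excel` writes one header row per column level plus a single row holding the index names, so the header block is `columns.nlevels + 1` rows tall regardless of how many index levels there are; the previous `columns.nlevels + index.nlevels` also froze data rows as soon as the index had more than one level. A minimal standalone sketch (sample data, sheet name, and file name are invented, not taken from the repository):

```python
import pandas as pd

df = pd.DataFrame(
    [[1.0, 2.0], [3.0, 4.0]],
    index=pd.MultiIndex.from_tuples(
        [("llama", "fp16"), ("phi", "fp16")], names=["model", "dtype"]
    ),
    columns=pd.MultiIndex.from_tuples([("time", "eager"), ("time", "export")]),
)

with pd.ExcelWriter("demo.xlsx") as writer:
    df.to_excel(
        writer,
        sheet_name="demo",
        # 2 column-header rows + 1 index-name row = 3 frozen rows,
        # 2 frozen columns for the two index levels.
        freeze_panes=(df.columns.nlevels + 1, df.index.nlevels),
    )
```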
@@ -1222,15 +1222,15 @@ def to_excel(
writer,
sheet_name=name,
freeze_panes=(
sbs_raw.columns.nlevels + sbs_raw.index.nlevels,
sbs_raw.columns.nlevels + 1,
sbs_raw.index.nlevels,
),
)
sbs_agg.to_excel(
writer,
sheet_name=f"{name}-AGG",
freeze_panes=(
sbs_agg.columns.nlevels + sbs_agg.index.nlevels,
sbs_agg.columns.nlevels + 1,
sbs_agg.index.nlevels,
),
)
@@ -1456,9 +1456,7 @@ def _mkc(m, s):


class CubeLogsPerformance(CubeLogs):
"""
Processes logs coming from experiments.
"""
"""Processes logs coming from experiments."""

def __init__(
self,
@@ -1511,20 +1509,25 @@ def __init__(
"n_model_faster2x",
"n_model_faster3x",
"n_model_faster4x",
"n_model_faster5x",
"n_node_attention",
"n_node_attention23",
"n_node_rotary_embedding",
"n_node_rotary_embedding23",
"n_node_layer_normalization",
"n_node_layer_normalization23",
"n_node_causal_mask",
"n_node_constant",
"n_node_control_flow",
"n_node_scatter",
"n_node_expand",
"n_node_function",
"n_node_gqa",
"n_node_initializer",
"n_node_initializer_small",
"n_node_constant",
"n_node_layer_normalization",
"n_node_layer_normalization23",
"n_node_reshape",
"n_node_rotary_embedding",
"n_node_rotary_embedding23",
"n_node_scatter",
"n_node_sequence",
"n_node_shape",
"n_node_expand",
"onnx_n_nodes_no_cst",
"peak_gpu_torch",
"peak_gpu_nvidia",
@@ -1690,6 +1693,11 @@ def first_err(df: pandas.DataFrame) -> pandas.Series:
"time_latency",
gdf(df, "time_latency_eager") > gdf(df, "time_latency", np.inf) * 3.98,
),
n_model_faster5x=lambda df: gpreserve(
df,
"time_latency",
gdf(df, "time_latency_eager") > gdf(df, "time_latency", np.inf) * 4.98,
),
n_node_attention23=lambda df: gpreserve(
df, "time_latency_eager", gdf(df, "op_onnx__Attention")
),
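The new `n_model_faster5x` metric extends the existing `faster2x/3x/4x` family: it counts models whose eager latency is more than roughly five times the exported-model latency, with 4.98 rather than 5 presumably as a small tolerance. `gdf` and `gpreserve` are helpers defined elsewhere in `log_helper.py`; assuming `gdf(df, col, np.inf)` reads a column with `np.inf` substituted for missing values, a standalone rendering of the condition could look like this (sample data invented):

```python
import numpy as np
import pandas as pd

df = pd.DataFrame(
    {
        "time_latency_eager": [10.0, 8.0, 2.0, 5.0],
        "time_latency": [1.9, 4.0, np.nan, 1.0],
    }
)

# Eager more than ~5x slower than the exported model; a missing
# time_latency becomes +inf so that row never counts.
faster5x = df["time_latency_eager"] > df["time_latency"].fillna(np.inf) * 4.98
print(int(faster5x.sum()))  # 2
```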
@@ -1720,6 +1728,11 @@ def first_err(df: pandas.DataFrame) -> pandas.Series:
+ gdf(df, "op_onnx_com.microsoft_DecoderMaskedMultiHeadAttention", 0)
+ gdf(df, "op_onnx_com.microsoft_SparseAttention", 0),
),
n_node_gqa=lambda df: gpreserve(
df,
"time_latency_eager",
gdf(df, "op_onnx_com.microsoft_GroupQueryAttention", 0),
),
n_node_layer_normalization=lambda df: gpreserve(
df,
"time_latency_eager",
@@ -1764,9 +1777,22 @@ def first_err(df: pandas.DataFrame) -> pandas.Series:
n_node_shape=lambda df: gpreserve(
df, "time_latency_eager", gdf(df, "op_onnx__Shape")
),
n_node_reshape=lambda df: gpreserve(
df, "time_latency_eager", gdf(df, "op_onnx__Reshape")
),
n_node_expand=lambda df: gpreserve(
df, "time_latency_eager", gdf(df, "op_onnx__Expand")
),
n_node_causal_mask=lambda df: gpreserve(
df,
"time_latency_eager",
gdf(df, "op_onnx__CausalMask", 0),
),
n_node_sequence=lambda df: gpreserve(
df,
"time_latency_eager",
gdf(df, "op_onnx__SequenceAt", 0) + gdf(df, "op_onnx__SplitToSequence", 0),
),
)
assert (
formula in lambdas
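Per the changelog entry for PR 250, the headline addition is tracking of sequence nodes: `n_node_sequence` sums the `SequenceAt` and `SplitToSequence` counters, each defaulting to 0 when the column is absent. A small standalone rendering of that sum, where `col_or_zero` is a hypothetical stand-in for `gdf(df, name, 0)`:

```python
import pandas as pd


def col_or_zero(df: pd.DataFrame, name: str) -> pd.Series:
    # Hypothetical stand-in for gdf(df, name, 0): the column, or zeros if absent.
    return df[name] if name in df.columns else pd.Series(0, index=df.index)


df = pd.DataFrame({"op_onnx__SequenceAt": [2, 0], "op_onnx__SplitToSequence": [1, 1]})
n_node_sequence = col_or_zero(df, "op_onnx__SequenceAt") + col_or_zero(
    df, "op_onnx__SplitToSequence"
)
print(n_node_sequence.tolist())  # [3, 1]
```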
1 change: 1 addition & 0 deletions onnx_diagnostic/torch_models/validate.py
@@ -1744,6 +1744,7 @@ def _simplify(p):
"constant_folding",
"remove_identity",
"remove_duplicated_initializer",
"remove_duplicated_shape",
"dynamic_dimension_naming",
"inline",
"check",
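`remove_duplicated_shape` is referenced here only by name, as one more pass in the list handled by `_simplify`. As a rough illustration of what a pass with that name typically does, here is a sketch under the assumption that it merges `Shape` nodes reading the same input (not the library's actual implementation):

```python
import onnx


def remove_duplicated_shape(graph: onnx.GraphProto) -> None:
    """Drops Shape nodes that duplicate an earlier Shape of the same input."""
    seen = {}    # input name -> output of the first Shape node reading it
    rename = {}  # output of a duplicate -> surviving output
    drop = []    # indices of duplicated Shape nodes
    for i, node in enumerate(graph.node):
        # Keep the sketch simple: only merge Shape nodes without start/end attributes.
        if node.op_type == "Shape" and not node.attribute:
            key = node.input[0]
            if key in seen:
                rename[node.output[0]] = seen[key]
                drop.append(i)
            else:
                seen[key] = node.output[0]
    for node in graph.node:  # rewire consumers of the removed outputs
        for j, name in enumerate(node.input):
            if name in rename:
                node.input[j] = rename[name]
    for i in reversed(drop):  # delete last so earlier indices stay valid
        del graph.node[i]
```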