Skip to content

Commit 27f7c49

Browse files
committed
doc
1 parent f373c4a commit 27f7c49

File tree

5 files changed

+136
-4
lines changed

5 files changed

+136
-4
lines changed

README.rst

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,51 @@ or
4444

4545
pip install onnx-diagnostic
4646

47+
Snapshot of useful tools
48+
+++++++++++++++++++++++++
49+
50+
**string_type**
51+
52+
.. code-block:: python
53+
54+
import torch
55+
from onnx_diagnostic.helpers import string_type
56+
57+
inputs = (
58+
torch.rand((3, 4), dtype=torch.float16),
59+
[
60+
torch.rand((5, 6), dtype=torch.float16),
61+
torch.rand((5, 6, 7), dtype=torch.float16),
62+
]
63+
)
64+
65+
# with shapes
66+
print(string_type(inputs, with_shape=True))
67+
68+
::
69+
70+
>>> (T10s3x4,#2[T10s5x6,T10s5x6x7])
71+
72+
**onnx_dtype_name**
73+
74+
.. code-block:: python
75+
76+
import onnx
77+
from onnx_diagnostic.helpers import onnx_dtype_name
78+
79+
itype = onnx.TensorProto.BFLOAT16
80+
print(onnx_dtype_name(itype))
81+
print(onnx_dtype_name(7))
82+
83+
::
84+
85+
>>> BFLOAT16
86+
>>> INT64
87+
88+
**max_diff**
89+
90+
Returns the maximum discrepancies across nested containers containing tensors.
91+
4792
Documentation
4893
+++++++++++++
4994

_doc/api/index.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ API of onnx_diagnostic
1616
cache_helpers
1717
ext_test_case
1818
helpers
19+
onnx_tools
1920
ort_session
2021
torch_test_helper
2122

_doc/api/onnx_tools.rst

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
2+
onnx_diagnostic.onnx_tools
3+
==========================
4+
5+
.. automodule:: onnx_diagnostic.onnx_tools
6+
:members:
7+
:no-undoc-members:

_doc/index.rst

Lines changed: 41 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11

2-
onnx-diagnostic: fuzzy work
3-
===================================
2+
onnx-diagnostic: investigate onnx models
3+
========================================
44

55
.. image:: https://github.com/sdpython/onnx-diagnostic/actions/workflows/documentation.yml/badge.svg
66
:target: https://github.com/sdpython/onnx-diagnostic/actions/workflows/documentation.yml
@@ -45,6 +45,45 @@ Source are `sdpython/onnx-diagnostic
4545
CHANGELOGS
4646
license
4747

48+
49+
**Some useful tools**
50+
51+
.. code-block:: python
52+
53+
import torch
54+
from onnx_diagnostic.helpers import string_type
55+
56+
inputs = (
57+
torch.rand((3, 4), dtype=torch.float16),
58+
[
59+
torch.rand((5, 6), dtype=torch.float16),
60+
torch.rand((5, 6, 7), dtype=torch.float16),
61+
]
62+
)
63+
64+
# with shapes
65+
print(string_type(inputs, with_shape=True))
66+
67+
::
68+
69+
>>> (T10s3x4,#2[T10s5x6,T10s5x6x7])
70+
71+
.. code-block:: python
72+
73+
import onnx
74+
from onnx_diagnostic.helpers import onnx_dtype_name
75+
76+
itype = onnx.TensorProto.BFLOAT16
77+
print(onnx_dtype_name(itype))
78+
print(onnx_dtype_name(7))
79+
80+
::
81+
82+
>>> BFLOAT16
83+
>>> INT64
84+
85+
:func:`onnx_diagnostic.helpers.max_diff`, ...
86+
4887
The documentation was updated on:
4988

5089
.. runpython::

onnx_diagnostic/helpers.py

Lines changed: 42 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,30 @@ def string_type(
142142
:showcode:
143143
144144
from onnx_diagnostic.helpers import string_type
145+
145146
print(string_type((1, ["r", 6.6])))
147+
148+
With pytorch:
149+
150+
.. runpython::
151+
:showcode:
152+
153+
import torch
154+
from onnx_diagnostic.helpers import string_type
155+
156+
inputs = (
157+
torch.rand((3, 4), dtype=torch.float16),
158+
[
159+
torch.rand((5, 6), dtype=torch.float16),
160+
torch.rand((5, 6, 7), dtype=torch.float16),
161+
]
162+
)
163+
164+
# with shapes
165+
print(string_type(inputs, with_shape=True))
166+
167+
# with min max
168+
print(string_type(inputs, with_shape=True, with_min_max=True))
146169
"""
147170
if obj is None:
148171
return "None"
@@ -465,7 +488,19 @@ def string_sig(f: Callable, kwargs: Optional[Dict[str, Any]] = None) -> str:
465488

466489
@functools.cache
467490
def onnx_dtype_name(itype: int) -> str:
468-
"""Returns the ONNX name for a specific element type."""
491+
"""
492+
Returns the ONNX name for a specific element type.
493+
494+
.. runpython::
495+
:showcode:
496+
497+
import onnx
498+
from onnx_diagnostic.helpers import onnx_dtype_name
499+
500+
itype = onnx.TensorProto.BFLOAT16
501+
print(onnx_dtype_name(itype))
502+
print(onnx_dtype_name(7))
503+
"""
469504
for k in dir(TensorProto):
470505
v = getattr(TensorProto, k)
471506
if v == itype:
@@ -477,19 +512,24 @@ def pretty_onnx(
477512
onx: Union[FunctionProto, GraphProto, ModelProto, ValueInfoProto, str],
478513
with_attributes: bool = False,
479514
highlight: Optional[Set[str]] = None,
515+
shape_inference: bool = False,
480516
) -> str:
481517
"""
482518
Displays an onnx proto in a better way.
483519
484520
:param with_attributes: displays attributes as well, if only a node is printed
485521
:param highlight: to highlight some names
522+
:param shape_inference: run shape inference before printing the model
486523
:return: text
487524
"""
488525
assert onx is not None, "onx cannot be None"
489526
if isinstance(onx, str):
490527
onx = onnx_load(onx, load_external_data=False)
491528
assert onx is not None, "onx cannot be None"
492529

530+
if shape_inference:
531+
onx = onnx.shape_inference.infer_shapes(onx)
532+
493533
if isinstance(onx, ValueInfoProto):
494534
name = onx.name
495535
itype = onx.type.tensor_type.elem_type
@@ -577,7 +617,7 @@ def make_hash(obj: Any) -> str:
577617

578618
def get_onnx_signature(model: ModelProto) -> Tuple[Tuple[str, Any], ...]:
579619
"""
580-
Produces a tuple of tuples correspinding to the signatures.
620+
Produces a tuple of tuples corresponding to the signatures.
581621
582622
:param model: model
583623
:return: signature

0 commit comments

Comments
 (0)