Update outdated docs for torch logs #3127
Changes from 2 commits
@@ -31,52 +31,81 @@
# variable setting is shown for each example.

import torch
import sys

# exit cleanly if we are on a device that doesn't support torch.compile
if torch.cuda.get_device_capability() < (7, 0):
    print("Skipping because torch.compile is not supported on this device.")
else:
    @torch.compile()
    def fn(x, y):
        z = x + y
        return z + 2

def env_setup():
    """Set up environment for running the example. Exit cleanly if CUDA is not available."""
    if not torch.cuda.is_available():
        print("CUDA is not available. Exiting.")
        sys.exit(0)

    if torch.cuda.get_device_capability() < (7, 0):
        print("Skipping because torch.compile is not supported on this device.")
        sys.exit(0)

inputs = (torch.ones(2, 2, device="cuda"), torch.zeros(2, 2, device="cuda"))

def separator(name):
    """Print separator and reset dynamo between each example"""
    print(f"\n{'='*20} {name} {'='*20}")
    torch._dynamo.reset()

# print separator and reset dynamo
# between each example
def separator(name):
    print(f"==================={name}=========================")
    torch._dynamo.reset()

def run_debugging_suite():
    """Run the complete debugging suite with all logging options"""
    env_setup()

separator("Dynamo Tracing")
# View dynamo tracing
# TORCH_LOGS="+dynamo"
torch._logging.set_logs(dynamo=logging.DEBUG)
fn(*inputs)

separator("Traced Graph")
# View traced graph
# TORCH_LOGS="graph"
torch._logging.set_logs(graph=True)
fn(*inputs)

separator("Fusion Decisions")
# View fusion decisions
# TORCH_LOGS="fusion"
torch._logging.set_logs(fusion=True)
fn(*inputs)

@torch.compile()
def fn(x, y):
    z = x + y
    return z + 2

separator("Output Code")
# View output code generated by inductor
# TORCH_LOGS="output_code"
torch._logging.set_logs(output_code=True)
fn(*inputs)

inputs = (
    torch.ones(2, 2, device="cuda"),
    torch.zeros(2, 2, device="cuda")
)

logging_scenarios = [
    # View dynamo tracing; TORCH_LOGS="+dynamo"
    ("Dynamo Tracing", {"dynamo": logging.DEBUG}),

    # View traced graph; TORCH_LOGS="graph"
    ("Traced Graph", {"graph": True}),

    # View fusion decisions; TORCH_LOGS="fusion"
    ("Fusion Decisions", {"fusion": True}),

    # View output code generated by inductor; TORCH_LOGS="output_code"
    ("Output Code", {"output_code": True})
]

for name, log_config in logging_scenarios:
    separator(name)
    torch._logging.set_logs(**log_config)
    try:
        result = fn(*inputs)
        print(f"Function output shape: {result.shape}")
    except Exception as e:
        print(f"Error during {name}: {str(e)}")


run_debugging_suite()

separator("")
######################################################################
# Use TORCH_TRACE/tlparse to produce produce compilation reports
Suggested change:
-# Use TORCH_TRACE/tlparse to produce produce compilation reports
+# Using ``TORCH_TRACE/tlparse`` to produce compilation reports
Suggested change:
-# ~~~~~~~~~~
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suggested change:
-# In this section, we introduce the usage of TORCH_TRACE and tlparse to produce reports.
+# In this section, we introduce ``TORCH_TRACE`` and ``tlparse`` to produce reports.
Suggested change:
-# First, we run `TORCH_TRACE="/tmp/tracedir" python script.py` to generate the raw trace logs.
+#
+# 1. Generate the raw trace logs by running the following command:
+#
+# .. code-block:: bash
+#
+#    TORCH_TRACE="/tmp/tracedir" python script.py
+#
Suggested change:
-# We have replace `/tmp/tracedir` with a path to a directory you want to store the trace logs
+# Ensure you replace ``/tmp/tracedir`` with the path to the directory where you want to store the trace logs
Suggested change:
-# and reaplce script with the name of your script.
+# and replace ``script.py`` with the name of your script.
Suggested change:
-# Next, we are going to pass the trace log to `tlparse` to generate compilation reports. We run
+# 2. Install ``tlparse`` by running:
+#
+# .. code-block:: bash
+#
+#    pip install tlparse
+#
+# 3. Pass the trace log to ``tlparse`` to generate compilation reports:
+#
+# .. code-block:: bash
+#
+#    tlparse /tmp/tracedir
+#
Suggested change:
-# `pip install tlparse` and then `tlparse /tmp/tracedir`. This will open up your browser with
-# HTML like generated above.
+# This will open your browser with HTML output like the one generated above.
+#
Suggested change:
-# By default, reports generated by `tlparse` are
+# By default, reports generated by ``tlparse`` are
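Taken together, the suggestions above amount to the short workflow below. This is a sketch in the tutorial's own comment style, not text from the diff itself; ``/tmp/tracedir`` and ``script.py`` are placeholders for the trace directory and script name used throughout this thread.

# .. code-block:: bash
#
#    TORCH_TRACE="/tmp/tracedir" python script.py
#    pip install tlparse
#    tlparse /tmp/tracedir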