@@ -27,6 +27,7 @@ To enable all of this, the integration code is accomplished by writing a handful
``` python
from sparseml.pytorch.optim import ScheduledModifierManager
+ # fill in definitions below
model = Model() # model definition
optimizer = Optimizer() # optimizer definition
train_data = TrainData() # train data definition
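batch_size = 32  # training batch size

# A sketch of how this example typically continues beyond the lines shown in
# this hunk; the recipe path is a placeholder, and modify()/finalize() are
# assumed from the SparseML PyTorch ScheduledModifierManager API.
num_train_batches = len(train_data) / batch_size  # batches per training epoch
manager = ScheduledModifierManager.from_yaml("/PATH/TO/recipe.yaml")
optimizer = manager.modify(model, optimizer, steps_per_epoch=num_train_batches)

# run your normal PyTorch training loop; the wrapped optimizer applies the
# recipe's sparsification schedule at each step

manager.finalize(model)  # clean up modifier state once training completes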
@@ -58,6 +59,7 @@ To enable all of this, the integration code you'll need to write is only a handf
``` python
from sparseml.keras.optim import ScheduledModifierManager
+ # fill in definitions below
model = None # your model definition
optimizer = None # your optimizer definition
num_train_batches = len(train_data) / batch_size  # your number of batches per training epoch
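
# A sketch of how the Keras example typically continues; the recipe path is a
# placeholder, and modify()/finalize() are assumed from the SparseML Keras
# ScheduledModifierManager API.
manager = ScheduledModifierManager.from_yaml("/PATH/TO/recipe.yaml")
model, optimizer, callbacks = manager.modify(
    model, optimizer, steps_per_epoch=num_train_batches
)

# compile after modify() and pass the returned callbacks into fit()
model.compile(optimizer=optimizer, loss="categorical_crossentropy")
model.fit(train_data, callbacks=callbacks)

saved_model = manager.finalize(model)  # clean up the model for saving/export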
@@ -94,6 +96,7 @@ The `ScheduledModifierManager` can override the necessary callbacks in the estim
``` python
from sparseml.tensorflow_v1.optim import ScheduledModifierManager
+ # fill in definitions below
estimator = None # your estimator definition
num_train_batches = len(train_data) / batch_size  # your number of batches per training epoch
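
# A sketch of the remaining estimator wiring; the recipe path is a
# placeholder, and modify_estimator() is assumed from the SparseML
# tensorflow_v1 ScheduledModifierManager API.
manager = ScheduledModifierManager.from_yaml("/PATH/TO/recipe.yaml")
manager.modify_estimator(estimator, steps_per_epoch=num_train_batches)

# train the estimator as usual; the overridden callbacks apply the recipe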
@@ -118,6 +121,7 @@ from sparseml.tensorflow_v1.utils import tf_compat
from sparseml.tensorflow_v1.optim import ScheduledModifierManager
+ # fill in definitions below
with tf_compat.Graph().as_default() as graph:
    # Normal graph setup....
    num_train_batches = len(train_data) / batch_size  # your number of batches per training epoch
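
    # A sketch of how the graph example typically continues; the recipe path
    # is a placeholder, and create_ops()/complete_graph() are assumed from
    # the SparseML tensorflow_v1 ScheduledModifierManager API.
    manager = ScheduledModifierManager.from_yaml("/PATH/TO/recipe.yaml")
    mod_ops, mod_extras = manager.create_ops(steps_per_epoch=num_train_batches)

    with tf_compat.Session() as sess:
        sess.run(tf_compat.global_variables_initializer())
        # normal training loop; run the modifier ops after every training step
        sess.run(mod_ops)

        # once training completes, clean up the graph before saving/exporting
        manager.complete_graph()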