diff --git a/source/_static/code/en/basic/eager/grad.py b/source/_static/code/en/basic/eager/grad.py
index 39a35455..1fa6f66a 100644
--- a/source/_static/code/en/basic/eager/grad.py
+++ b/source/_static/code/en/basic/eager/grad.py
@@ -11,6 +11,6 @@
 w = tf.Variable(initial_value=[[1.], [2.]])
 b = tf.Variable(initial_value=1.)
 with tf.GradientTape() as tape:
-    L = 0.5 * tf.reduce_sum(tf.square(tf.matmul(X, w) + b - y))
+    L = tf.reduce_sum(tf.square(tf.matmul(X, w) + b - y))
 w_grad, b_grad = tape.gradient(L, [w, b])  # Compute the partial derivatives of L(w, b) with respect to w and b
-print([L.numpy(), w_grad.numpy(), b_grad.numpy()])
\ No newline at end of file
+print([L.numpy(), w_grad.numpy(), b_grad.numpy()])
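
For reference, a minimal self-contained sketch of the script after this change. The definitions of X and y fall outside the diff context, so the values below are assumptions based on the surrounding tutorial's small linear-regression example; treat them as illustrative, not as part of this patch.

import tensorflow as tf

# Assumed training data (not shown in the diff; taken as in the tutorial's
# linear-regression example):
X = tf.constant([[1., 2.], [3., 4.]])
y = tf.constant([[1.], [2.]])

w = tf.Variable(initial_value=[[1.], [2.]])
b = tf.Variable(initial_value=1.)
with tf.GradientTape() as tape:
    # Squared-error loss. Dropping the former 0.5 factor scales L and both
    # gradients by 2 relative to the previous version of the script.
    L = tf.reduce_sum(tf.square(tf.matmul(X, w) + b - y))
w_grad, b_grad = tape.gradient(L, [w, b])  # Partial derivatives of L with respect to w and b
print([L.numpy(), w_grad.numpy(), b_grad.numpy()])

With these assumed inputs the script prints L = 125.0, w_grad = [[70.], [100.]], and b_grad = 30.0; under the old 0.5-scaled loss each of these values would be halved.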