
Commit e1da243

Merge pull request #12 from brettkoonce/minor_sp
minor spelling tweaks
2 parents 3d8ce2b + 2b064a5

File tree

6 files changed: +10 -10 lines changed

mesh_tensorflow/beam_search.py

Lines changed: 1 addition & 1 deletion
@@ -125,7 +125,7 @@ def beam_search(logits_fn,
 
   Args:
     logits_fn: Interface to the model, to provide logits.
-      Shoud take:
+      Should take:
        step_num - mtf Scalar
        ids - mtf Tensor with shape [batch, beam, length]
      Should return:
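For orientation, a minimal sketch of a logits_fn matching the documented interface. The vocab dimension and the zero logits are placeholders for a real decoder, and the [batch, beam, vocab] return shape is an assumption inferred from the surrounding docstring, not quoted from it:

import mesh_tensorflow as mtf

vocab_dim = mtf.Dimension("vocab", 32)  # assumed vocabulary dimension

def logits_fn(step_num, ids):
  # step_num: mtf Scalar; ids: mtf Tensor [batch, beam, length].
  # A real model would run a decoder here; this stub just emits zero
  # logits of the (assumed) [batch, beam, vocab] shape.
  del step_num  # unused by this stub
  return mtf.zeros(ids.mesh, mtf.Shape(ids.shape.dims[:2] + [vocab_dim]))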

mesh_tensorflow/ops.py

Lines changed: 3 additions & 3 deletions
@@ -3157,7 +3157,7 @@ def einsum(xs, output_shape=None, reduced_dims=None, name=None):
   shape is set to the contain all dimensions that appear exactly once in the
   inputs, in order of appearance.
 
-  If output_shape is not specifed, then the output shape is set to the contain
+  If output_shape is not specified, then the output shape is set to the contain
   all dimensions that appear in xs but not in reduced_dims, in the order
   that they appear in xs. If reduced_dims is also not specified, then
   reduced_dims is set to the set of all dimensions that appear at least twice in
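The defaults described in that docstring are easy to see in a small sketch (dimension names are arbitrary; a minimal example, not library code). Here "cols" appears in both inputs, so with output_shape and reduced_dims unset it is reduced, giving a matrix product:

import mesh_tensorflow as mtf

graph = mtf.Graph()
mesh = mtf.Mesh(graph, "example_mesh")

rows = mtf.Dimension("rows", 4)
cols = mtf.Dimension("cols", 8)
out = mtf.Dimension("out", 16)

a = mtf.zeros(mesh, mtf.Shape([rows, cols]))
w = mtf.zeros(mesh, mtf.Shape([cols, out]))

# "cols" appears twice, so it is reduced by default: y has shape [rows, out].
y = mtf.einsum([a, w])
# Equivalent, with the output shape spelled out:
y2 = mtf.einsum([a, w], output_shape=mtf.Shape([rows, out]))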
@@ -3488,7 +3488,7 @@ def sub(x1, x2, output_shape=None, name=None):
 
 
 def multiply(x1, x2, output_shape=None, name=None):
-  """Binary multiplication with broadcsting.
+  """Binary multiplication with broadcasting.
 
   Args:
     x1: a Tensor
@@ -3509,7 +3509,7 @@ def multiply(x1, x2, output_shape=None, name=None):
 
 
 def divide(x1, x2, output_shape=None, name=None):
-  """Binary division with broadcsting.
+  """Binary division with broadcasting.
 
   Args:
     x1: a Tensor
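Since both corrected docstrings mention broadcasting, a small sketch of how it behaves in mtf, where dimensions are matched by name (a minimal illustration, not library code):

import mesh_tensorflow as mtf

graph = mtf.Graph()
mesh = mtf.Mesh(graph, "example_mesh")

batch = mtf.Dimension("batch", 2)
hidden = mtf.Dimension("hidden", 8)

x = mtf.zeros(mesh, mtf.Shape([batch, hidden]))
b = mtf.zeros(mesh, mtf.Shape([hidden]))

# b lacks the "batch" dimension, so it is broadcast over it:
y = mtf.multiply(x, b)  # shape [batch, hidden]
z = mtf.divide(x, b)    # same broadcasting rule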

mesh_tensorflow/optimize.py

Lines changed: 2 additions & 2 deletions
@@ -34,14 +34,14 @@ def make_optimizer(hparams, lr):
 
 
 class Optimizer(object):
-  """Base optmizer class."""
+  """Base optimizer class."""
 
   def apply_grad(self, grad, var):
     raise ValueError("Apply_Grad not implemented %s %s" % (grad, var))
 
 
 class SgdOptimizer(Optimizer):
-  """oOptimizer implementing SGD."""
+  """Optimizer implementing SGD."""
 
   def __init__(self, lr):
     self._lr = lr
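apply_grad is the one method subclasses must override. A minimal sketch of an SGD-style subclass; the mtf.assign_sub update shown is an illustration of the idea, not necessarily this commit's exact implementation:

import mesh_tensorflow as mtf
from mesh_tensorflow import optimize

class MySgd(optimize.Optimizer):
  """Illustrative SGD subclass: var <- var - lr * grad."""

  def __init__(self, lr):
    self._lr = lr

  def apply_grad(self, grad, var):
    if grad is None:
      return []
    # One assignment op per variable.
    return [mtf.assign_sub(var, grad * self._lr)]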

mesh_tensorflow/placement_mesh_impl.py

Lines changed: 1 addition & 1 deletion
@@ -78,7 +78,7 @@ def __init__(self, variable, mesh_impl):
     base_name = variable.name
     if self.slice_is_master:
       tf.logging.info(
-          "Single slice is indentical to master - avoid creating extra vars.")
+          "Single slice is identical to master - avoid creating extra vars.")
       slices = [variable.get_master()]
       self._laid_out_tensor = mesh_impl.LaidOutTensor(slices)
       self._copy_slices_to_master = tf.group([])

mesh_tensorflow/simd_mesh_impl.py

Lines changed: 2 additions & 2 deletions
@@ -154,7 +154,7 @@ def _generate_copy_master_to_slices_op(self, master_variable, master_shape,
     Args:
       master_variable: The master variable.
       master_shape: The shape of master variable.
-      slices: The list of sliced varialbes.
+      slices: The list of sliced variables.
       slice_shape: The shape of the slice variable.
     Returns:
       A grouped tf.assign ops.
@@ -365,7 +365,7 @@ def receive(self, x, mesh_axis, source_pcoord):
       return tpu_ops.collective_permute(t, source_target_pairs)
 
   def slice(self, tf_tensor, tensor_shape):
-    """"Slice out the correspoding part of tensor given the pnum variable."""
+    """"Slice out the corresponding part of tensor given the pnum variable."""
     tensor_layout = self.tensor_layout(tensor_shape)
 
     if tensor_layout.is_fully_replicated:

mesh_tensorflow/utils.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ def outside_all_rewrites():
 
 
 class BalancedVariablePlacer(object):
-  """Place the variable on different device and blance the memory usage."""
+  """Place the variable on different device and balance the memory usage."""
 
   def __init__(self, devices, init_usage=None):
     init_usage = init_usage if init_usage else [0] * len(devices)
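The corrected docstring describes a greedy balancing idea: send each new variable to the device with the least memory used so far. A minimal sketch of that idea; GreedyPlacer, pick_device, and the heap bookkeeping are hypothetical illustrations, not the library's code:

import heapq

class GreedyPlacer(object):
  """Illustrative: place each variable on the least-loaded device."""

  def __init__(self, devices, init_usage=None):
    init_usage = init_usage if init_usage else [0] * len(devices)
    # Min-heap of (bytes_used, device) pairs.
    self._heap = [(u, d) for u, d in zip(init_usage, devices)]
    heapq.heapify(self._heap)

  def pick_device(self, var_bytes):
    usage, device = heapq.heappop(self._heap)
    heapq.heappush(self._heap, (usage + var_bytes, device))
    return device

# Usage: GreedyPlacer(["gpu:0", "gpu:1"]).pick_device(4096) -> "gpu:0"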
