
Commit 60cf538

Correctly format diffed code.

1 parent: 789ce19

1 file changed: 17 additions, 17 deletions

rfcs/20191203-single-eager-graph-path.md

@@ -48,11 +48,11 @@ References:
 Basically we want to get rid of the graph-building part in gen_*_ops.py and get rid of gradient tape bookkeeping in both graph and eager modes. For example:


-```
+```diff
 def batch_matrix_band_part(input, num_lower, num_upper, name=None):
   _ctx = _context._context or _context.context()
   tld = _ctx._thread_local_data
-  ~~if tld.is_eager:~~
+- if tld.is_eager:
     try:
       _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
         _ctx._context_handle, tld.device_name, "BatchMatrixBandPart", name,
@@ -66,18 +66,18 @@ def batch_matrix_band_part(input, num_lower, num_upper, name=None):
         pass  # Add nodes to the TensorFlow graph.
     except _core._NotOkStatusException as e:
       _ops.raise_from_not_ok_status(e, name)
-  ~~# Add nodes to the TensorFlow graph.
-  _, _, _op, _outputs = _op_def_library._apply_op_helper(
-        "BatchMatrixBandPart", input=input, num_lower=num_lower,
-        num_upper=num_upper, name=name)
-  _result = _outputs[:]
-  if _execute.must_record_gradient():
-    _attrs = ("T", _op._get_attr_type("T"))
-    _inputs_flat = _op.inputs
-    _execute.record_gradient(
-        "BatchMatrixBandPart", _inputs_flat, _attrs, _result)
-  _result, = _result
-  return _result~~
+- # Add nodes to the TensorFlow graph.
+- _, _, _op, _outputs = _op_def_library._apply_op_helper(
+-       "BatchMatrixBandPart", input=input, num_lower=num_lower,
+-       num_upper=num_upper, name=name)
+- _result = _outputs[:]
+- if _execute.must_record_gradient():
+-   _attrs = ("T", _op._get_attr_type("T"))
+-   _inputs_flat = _op.inputs
+-   _execute.record_gradient(
+-       "BatchMatrixBandPart", _inputs_flat, _attrs, _result)
+- _result, = _result
+- return _result~~

 def batch_matrix_band_part_eager_fallback(input, num_lower, num_upper, name, ctx):
   _attr_T, (input,) = _execute.args_to_matching_eager([input], ctx)
@@ -87,9 +87,9 @@ def batch_matrix_band_part_eager_fallback(input, num_lower, num_upper, name, ctx
   _attrs = ("T", _attr_T)
   _result = _execute.execute(b"BatchMatrixBandPart", 1, inputs=_inputs_flat,
                              attrs=_attrs, ctx=ctx, name=name)
-  ~~if _execute.must_record_gradient():
-    _execute.record_gradient(
-        "BatchMatrixBandPart", _inputs_flat, _attrs, _result)~~
+- if _execute.must_record_gradient():
+-   _execute.record_gradient(
+-       "BatchMatrixBandPart", _inputs_flat, _attrs, _result)
   _result, = _result
   return _result
 ```
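
Why this change matters for the rendered RFC: inside a plain ``` code block, GitHub does not interpret ~~strikethrough~~, so the tildes were showing up literally around the generated code that the proposal wants to delete. Switching the fence to ```diff and prefixing each to-be-deleted line with "- " makes those lines render as removals instead. A minimal before/after illustration, reusing the `if tld.is_eager:` line from the first hunk above (the nested snippets are shown indented so they read as quoted markdown, not as fences of this page):

Before (tildes render literally):

    ```
    ~~if tld.is_eager:~~
    ```

After (the line renders as a deletion):

    ```diff
    - if tld.is_eager:
    ```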
