Commit c709a04

Merge pull request #13018 from panyx0718/name
support op_namescope for better debugging
2 parents: af15f6f + 7dff0b1

File tree

11 files changed: +159 -19 lines changed

paddle/fluid/API.spec

Lines changed: 1 addition & 0 deletions
@@ -36,6 +36,7 @@ paddle.fluid.default_startup_program ArgSpec(args=[], varargs=None, keywords=Non
 paddle.fluid.default_main_program ArgSpec(args=[], varargs=None, keywords=None, defaults=None)
 paddle.fluid.program_guard ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
 paddle.fluid.get_var ArgSpec(args=['name', 'program'], varargs=None, keywords=None, defaults=(None,))
+paddle.fluid.name_scope ArgSpec(args=[], varargs='args', keywords='kwds', defaults=None)
 paddle.fluid.Executor.__init__ ArgSpec(args=['self', 'place'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Executor.close ArgSpec(args=['self'], varargs=None, keywords=None, defaults=None)
 paddle.fluid.Executor.run ArgSpec(args=['self', 'program', 'feed', 'fetch_list', 'feed_var_name', 'fetch_var_name', 'scope', 'return_numpy', 'use_program_cache'], varargs=None, keywords=None, defaults=(None, None, None, 'feed', 'fetch', None, True, False))
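
With this entry, fluid.name_scope becomes part of the public Python API. A minimal usage sketch (the layer calls are illustrative, not part of this diff):

    import paddle.fluid as fluid

    # Every operator created inside the block records the scope in its
    # "op_namescope" attribute, e.g. "/encoder/".
    with fluid.name_scope("encoder"):
        x = fluid.layers.data(name='x', shape=[1], dtype='int32')
        y = x + 1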

paddle/fluid/framework/ir/graph_viz_pass.cc

Lines changed: 16 additions & 2 deletions
@@ -16,13 +16,27 @@ limitations under the License. */
 #include <unordered_set>
 
 #include "paddle/fluid/framework/ir/graph_viz_pass.h"
+#include "paddle/fluid/framework/op_proto_maker.h"
 #include "paddle/fluid/inference/analysis/dot.h"
+#include "paddle/fluid/string/printf.h"
 
 namespace paddle {
 namespace framework {
 namespace ir {
-static const char kGraphVizPath[] = "graph_viz_path";
 using inference::analysis::Dot;
+namespace {
+const char kGraphVizPath[] = "graph_viz_path";
+
+std::string FormatName(const Node* node) {
+  if (!node->IsOp() || !node->Op() ||
+      !node->Op()->HasAttr(OpProtoAndCheckerMaker::OpNamescopeAttrName())) {
+    return node->Name();
+  }
+  const std::string full_scope = boost::get<std::string>(
+      node->Op()->GetAttr(OpProtoAndCheckerMaker::OpNamescopeAttrName()));
+  return string::Sprintf("%s%s", full_scope.c_str(), node->Name().c_str());
+}
+}  // namespace
 
 std::unique_ptr<ir::Graph> GraphVizPass::ApplyImpl(
     std::unique_ptr<ir::Graph> graph) const {
@@ -54,7 +68,7 @@ std::unique_ptr<ir::Graph> GraphVizPass::ApplyImpl(
   auto marked_nodes = ConsumeMarkedNodes(graph.get());
   // Create nodes
   for (const Node* n : graph->Nodes()) {
-    std::string node_id = n->Name() + "(" + std::to_string(n->id()) + ")";
+    std::string node_id = FormatName(n) + "(" + std::to_string(n->id()) + ")";
    if (n->IsOp()) {
       decltype(op_attrs) attr =
           marked_nodes.count(n) ? marked_op_attrs : op_attrs;
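
Net effect on the visualization: op nodes are now labeled with their full name scope prefixed to the op type. A rough Python sketch of the label format produced above (names and ids are illustrative):

    def format_label(namescope, op_type, node_id):
        # FormatName prepends the "op_namescope" attribute (when present)
        # to the node name; ApplyImpl then appends "(<graph node id>)".
        return "%s%s(%d)" % (namescope, op_type, node_id)

    format_label("/s1/s2/", "mul", 42)  # -> "/s1/s2/mul(42)"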

paddle/fluid/framework/ir/node.h

Lines changed: 2 additions & 2 deletions
@@ -55,11 +55,11 @@ class Node {
   std::string Name() const { return name_; }
 
   VarDesc* Var() {
-    PADDLE_ENFORCE(type_ == Type::kVariable);
+    PADDLE_ENFORCE(IsVar());
     return var_desc_.get();
   }
 
-  OpDesc* Op() {
+  OpDesc* Op() const {
     PADDLE_ENFORCE(IsOp());
     return op_desc_.get();
   }
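
Making Op() const is what lets the new FormatName(const Node*) helper in graph_viz_pass.cc call node->Op() through a const pointer; the IsVar()/IsOp() predicates replace the direct enum comparison.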

paddle/fluid/framework/op_proto_maker.cc

Lines changed: 3 additions & 0 deletions
@@ -129,6 +129,9 @@ void OpProtoAndCheckerMaker::operator()(proto::OpProto* proto,
                 "Optimized for variable")
       .SetDefault({});
 
+  AddAttr<std::string>(OpNamescopeAttrName(), "Operator name with namesope.")
+      .SetDefault("");
+
   Validate();
 }
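
The empty-string default keeps operators valid even when they are built without going through the Python Operator constructor, which is what normally fills the attribute via _full_name_scope() (see framework.py below).
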
paddle/fluid/framework/op_proto_maker.h

Lines changed: 1 addition & 0 deletions
@@ -39,6 +39,7 @@ class OpProtoAndCheckerMaker {
  public:
   static const char *OpRoleAttrName() { return "op_role"; }
   static const char *OpRoleVarAttrName() { return "op_role_var"; }
+  static const char *OpNamescopeAttrName() { return "op_namescope"; }
 
   void operator()(proto::OpProto *proto, OpAttrChecker *attr_checker);

paddle/fluid/pybind/const_value.cc

Lines changed: 3 additions & 0 deletions
@@ -43,6 +43,9 @@ void BindConstValue(pybind11::module* m) {
   op_proto_and_checker_maker.def(
       "kOpRoleVarAttrName",
       framework::OpProtoAndCheckerMaker::OpRoleVarAttrName);
+  op_proto_and_checker_maker.def(
+      "kOpNameScopeAttrName",
+      framework::OpProtoAndCheckerMaker::OpNamescopeAttrName);
 }
 
 }  // namespace pybind
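
With the binding in place, Python can look the attribute name up from core rather than hard-coding the string. A minimal sketch, assuming the core.op_proto_and_checker_maker handle used elsewhere in framework.py:

    from paddle.fluid import core

    op_maker = core.op_proto_and_checker_maker
    print(op_maker.kOpNameScopeAttrName())  # "op_namescope"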

python/paddle/fluid/framework.py

Lines changed: 68 additions & 0 deletions
@@ -43,6 +43,7 @@
     'default_main_program',
     'program_guard',
     'get_var',
+    'name_scope',
 ]
 
 EMPTY_VAR_NAME = core.kEmptyVarName()
@@ -52,6 +53,70 @@
 CONTROL_DEP_VAR_PREFIX = core.kControlDepVarName()
 
 
+class NameScope(object):
+    def __init__(self, name="", parent=None):
+        self._children = dict()
+        self._name = name
+        self._parent = parent
+
+    def child(self, prefix):
+        if prefix not in self._children:
+            new_child = NameScope(prefix, self)
+            self._children[prefix] = [new_child]
+        else:
+            new_child = NameScope(prefix + "_%d" % len(self._children[prefix]),
+                                  self)
+            self._children[prefix].append(new_child)
+        return new_child
+
+    def parent(self):
+        return self._parent
+
+    def name(self):
+        return self._name
+
+
+_name_scope = NameScope()
+
+
+@contextlib.contextmanager
+def name_scope(prefix=None):
+    """
+    Generate hierarchical name prefix for the operators.
+
+    Note: This should only used for debugging and visualization purpose.
+    Don't use it for serious analysis such as graph/program transformations.
+
+    Args:
+        prefix(str): prefix.
+
+    Examples:
+        .. code-block:: python
+          with name_scope("encoder"):
+             ...
+          with name_scope("decoder"):
+             ...
+             with name_scope("attention"):
+                ...
+    """
+    # TODO(panyx0718): Only [0-9a-z].
+    assert prefix, "namescope prefix cannot be empty."
+    global _name_scope
+    _name_scope = _name_scope.child(prefix)
+    yield
+    _name_scope = _name_scope.parent()
+
+
+def _full_name_scope():
+    global _name_scope
+    scope = _name_scope
+    name = ""
+    while scope:
+        name = scope.name() + "/" + name
+        scope = scope.parent()
+    return name
+
+
 def generate_control_dev_var_name():
     import random
     return CONTROL_DEP_VAR_PREFIX + "@" + str(random.random())
@@ -515,6 +580,9 @@ def __init__(self,
         self.desc.set_type(type)
         proto = OpProtoHolder.instance().get_op_proto(type)
 
+        namescope_var_name = op_maker.kOpNameScopeAttrName()
+        op_attrs[namescope_var_name] = _full_name_scope()
+
         def find_name(var_list, name):
             for var_name in var_list:
                 if var_list[var_name] is not None and var_name == name:
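
Note how child() disambiguates repeated prefixes at the same level: the first child keeps the bare prefix and later ones get numbered suffixes. A standalone sketch using only the class above (scope names match the unit test later in this diff):

    root = NameScope()
    s1 = root.child("s1")        # named "s1"
    s2 = s1.child("s2")          # named "s2"; full prefix "/s1/s2/"
    s1_again = root.child("s1")  # named "s1_1"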

python/paddle/fluid/optimizer.py

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@
 from __future__ import print_function
 import re
 from collections import defaultdict
-from paddle.fluid.framework import Program, Variable
+from paddle.fluid.framework import Program, Variable, name_scope
 from . import framework
 from . import layers
 from .backward import append_backward
@@ -237,7 +237,7 @@ def _create_optimization_pass(self,
             if param_and_grad[1] is None:
                 continue
             with param_and_grad[0].block.program.optimized_guard(
-                    param_and_grad):
+                    param_and_grad), name_scope("optimizer"):
                 if param_and_grad[0].trainable is True:
                     optimize_op = self._append_optimize_op(loss.block,
                                                            param_and_grad)
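
Because name_scope("optimizer") is entered once per parameter, the appended ops are tagged with scopes like /optimizer/, /optimizer_1/, and so on (one numbered child per parameter), which makes the optimizer subgraph easy to pick out in the graphviz output.
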
python/paddle/fluid/tests/unittests/test_name_scope.py

Lines changed: 45 additions & 0 deletions

@@ -0,0 +1,45 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import unittest
+import paddle.fluid as fluid
+
+
+class TestNameScope(unittest.TestCase):
+    def test_name_scope(self):
+        with fluid.name_scope("s1"):
+            a = fluid.layers.data(name='data', shape=[1], dtype='int32')
+            b = a + 1
+            with fluid.name_scope("s2"):
+                c = b * 1
+            with fluid.name_scope("s3"):
+                d = c / 1
+        with fluid.name_scope("s1"):
+            f = fluid.layers.pow(d, 2.0)
+        with fluid.name_scope("s4"):
+            g = f - 1
+
+        for op in fluid.default_main_program().block(0).ops:
+            if op.type == 'elementwise_add':
+                self.assertEqual(op.desc.attr("op_namescope"), '/s1/')
+            elif op.type == 'elementwise_mul':
+                self.assertEqual(op.desc.attr("op_namescope"), '/s1/s2/')
+            elif op.type == 'elementwise_div':
+                self.assertEqual(op.desc.attr("op_namescope"), '/s1/s3/')
+            elif op.type == 'elementwise_sub':
+                self.assertEqual(op.desc.attr("op_namescope"), '/s4/')
+            elif op.type == 'pow':
+                self.assertEqual(op.desc.attr("op_namescope"), '/s1_1/')

python/paddle/fluid/tests/unittests/test_operator_desc.py

Lines changed: 4 additions & 1 deletion
@@ -67,7 +67,10 @@ def test_op_desc_creation(self):
         self.assertEqual(mul_op.output("Out"), ["mul.out"])
         self.assertEqual(
             set(mul_op.attr_names),
-            set(["x_num_col_dims", "y_num_col_dims", "op_role", "op_role_var"]))
+            set([
+                "x_num_col_dims", "y_num_col_dims", "op_role", "op_role_var",
+                "op_namescope"
+            ]))
         self.assertEqual(mul_op.has_attr("x_num_col_dims"), True)
         self.assertEqual(mul_op.attr_type("x_num_col_dims"), core.AttrType.INT)
         self.assertEqual(mul_op.attr("x_num_col_dims"), 1)
