
Commit ab47158

fix default create_parameter dtype matching initializers (#15521)
* fix default create_parameter dtype matching initializers (test=develop)
* update type check (test=develop)
* update (test=develop)
1 parent 67e4450 · commit ab47158

File tree

2 files changed: +13 -1 lines changed


python/paddle/fluid/layer_helper.py

Lines changed: 11 additions & 0 deletions
@@ -300,6 +300,17 @@ def create_parameter(self,
             attr.name = unique_name.generate(".".join([self.name, suffix]))
 
         if default_initializer is None and attr.initializer is None:
+            if isinstance(dtype, core.VarDesc.VarType):
+                if dtype != core.VarDesc.VarType.FP32 and \
+                    dtype != core.VarDesc.VarType.FP64:
+                    raise TypeError(
+                        "Can not create parameter with default initializer when dtype is not float type. Set default_initializer to fit the parameter dtype!"
+                    )
+            else:
+                if not (dtype.startswith("float") or dtype == "double"):
+                    raise TypeError(
+                        "Can not create parameter with default initializer when dtype is not float type. Set default_initializer to fit the parameter dtype!"
+                    )
             if is_bias:
                 attr._set_default_bias_initializer()
             else:
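
The check above only gates the default-initializer path. A minimal usage sketch of the resulting behavior, assuming the public fluid.layers.create_parameter wrapper (which forwards to LayerHelper.create_parameter) from the fluid 1.x API of this commit's era:

import paddle.fluid as fluid

# Float dtypes still receive the built-in default initializer.
w = fluid.layers.create_parameter(shape=[10, 10], dtype='float32')

# A non-float dtype with no initializer now fails fast at graph-build time.
try:
    fluid.layers.create_parameter(shape=[10], dtype='int32')
except TypeError as e:
    print(e)  # "Can not create parameter with default initializer ..."

# Passing default_initializer explicitly is the suggested remedy.
idx = fluid.layers.create_parameter(
    shape=[10], dtype='int32',
    default_initializer=fluid.initializer.Constant(value=0))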

python/paddle/fluid/tests/unittests/test_layers.py

Lines changed: 2 additions & 1 deletion
@@ -58,7 +58,8 @@ def test_recognize_digits_mlp(self):
     def test_simple_conv2d(self):
         program = Program()
         with program_guard(program, startup_program=Program()):
-            images = layers.data(name='pixel', shape=[3, 48, 48], dtype='int32')
+            images = layers.data(
+                name='pixel', shape=[3, 48, 48], dtype='float32')
             layers.conv2d(input=images, num_filters=3, filter_size=[4, 4])
 
         print(str(program))
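
Why the test input changed (a hedged reading of the two diffs together): conv2d creates its learnable parameters through LayerHelper.create_parameter using the input's dtype, so with the new check an 'int32' image tensor would raise TypeError during graph construction instead of silently producing a parameter the default float initializer cannot fill. A rough illustration of the now-rejected case, assuming the same fluid 1.x test setup:

import paddle.fluid.layers as layers
from paddle.fluid import Program, program_guard

program = Program()
with program_guard(program, startup_program=Program()):
    ints = layers.data(name='pixel', shape=[3, 48, 48], dtype='int32')
    try:
        layers.conv2d(input=ints, num_filters=3, filter_size=[4, 4])
    except TypeError as e:
        # One of conv2d's parameters inherits the int32 dtype with no
        # explicit initializer, so the new check rejects it.
        print(e)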
