
Commit 7f794ea

Replace the overfix of 2to3 with six.string_types
1 parent ce4eba3 commit 7f794ea
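
For background, a minimal sketch (not part of this commit) of why six.string_types is preferred over the 2to3 rewrite: 2to3 turns basestring checks into plain str checks, which reject unicode values on Python 2, whereas six.string_types is (basestring,) on Python 2 and (str,) on Python 3, so one isinstance check covers both interpreters. The helper name below is hypothetical, for illustration only:

    import six

    def check_name(name):  # hypothetical helper, for illustration only
        # Accepts str and unicode on Python 2, str on Python 3.
        if not isinstance(name, six.string_types):
            raise TypeError("name must be a string, got %s" % type(name))
        return name

    check_name(u"fc_0.w_0")  # passes on both interpreters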

13 files changed: +52 additions, -39 deletions

python/paddle/fluid/clip.py

Lines changed: 4 additions & 3 deletions
@@ -13,6 +13,7 @@
 # limitations under the License.

 import copy
+import six

 import functools
 from . import layers
@@ -246,8 +247,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
     """

     def __init__(self, clip_norm, group_name="default_group"):
-        if not isinstance(group_name, str):
-            raise TypeError("'group_name' must be a basestring.")
+        if not isinstance(group_name, six.string_types):
+            raise TypeError("'group_name' must be a %s." % (six.string_types))

         self.clip_norm = clip_norm
         self.group_name = group_name
@@ -312,7 +313,7 @@ def set_gradient_clip(clip, param_list=None, program=None):
         program = framework.default_main_program()
     if param_list is None:
         param_list = program.block(0).all_parameters()
-    if all(isinstance(elem, str) for elem in param_list):
+    if all(isinstance(elem, six.string_types) for elem in param_list):
         param_list = [program.block(0).var(elem) for elem in param_list]
     if not all(isinstance(elem, framework.Parameter) for elem in param_list):
         raise TypeError(

python/paddle/fluid/data_feeder.py

Lines changed: 14 additions & 9 deletions
@@ -15,7 +15,8 @@
 from . import core
 import numpy
 import os
-import six.moves as six
+import six
+from six.moves import zip, range, xrange
 import multiprocessing

 from .framework import Variable, default_main_program
@@ -52,7 +53,7 @@ def __init__(self, place, lod_level, shape, dtype):
         self.data = []
         self.lod = []

-        for i in six.range(lod_level):
+        for i in six.moves.range(lod_level):
             self.lod.append([])

     def feed(self, data):
@@ -141,7 +142,7 @@ def __init__(self, feed_list, place, program=None):
         if program is None:
             program = default_main_program()
         for each_var in feed_list:
-            if isinstance(each_var, str):
+            if isinstance(each_var, six.string_types):
                 each_var = program.block(0).var(each_var)
             if not isinstance(each_var, Variable):
                 raise TypeError("Feed list should contain a list of variable")
@@ -173,7 +174,7 @@ def feed(self, iterable):
             dict: the result of conversion.
         """
         converter = []
-        for lod_level, shape, dtype in six.zip(
+        for lod_level, shape, dtype in six.moves.zip(
                 self.feed_lod_level, self.feed_shapes, self.feed_dtypes):
             converter.append(
                 DataToLoDTensorConverter(
@@ -186,10 +187,12 @@ def feed(self, iterable):
             assert len(each_sample) == len(converter), (
                 "The number of fields in data (%s) does not match " +
                 "len(feed_list) (%s)") % (len(each_sample), len(converter))
-            for each_converter, each_slot in six.zip(converter, each_sample):
+            for each_converter, each_slot in six.moves.zip(converter,
+                                                           each_sample):
                 each_converter.feed(each_slot)
         ret_dict = {}
-        for each_name, each_converter in six.zip(self.feed_names, converter):
+        for each_name, each_converter in six.moves.zip(self.feed_names,
+                                                       converter):
             ret_dict[each_name] = each_converter.done()
         return ret_dict

@@ -211,12 +214,14 @@ def feed_parallel(self, iterable, num_places=None):
         if isinstance(self.place, core.CUDAPlace):
             places = [
                 core.CUDAPlace(i)
-                for i in six.xrange(self._get_number_of_places_(num_places))
+                for i in six.moves.xrange(
+                    self._get_number_of_places_(num_places))
             ]
         else:
             places = [
                 core.CPUPlace()
-                for _ in six.xrange(self._get_number_of_places_(num_places))
+                for _ in six.moves.xrange(
+                    self._get_number_of_places_(num_places))
             ]

         if len(iterable) != len(places):
@@ -226,7 +231,7 @@ def feed_parallel(self, iterable, num_places=None):
                 "must be same.")

         place = self.place
-        for p, batch in six.zip(places, iterable):
+        for p, batch in six.moves.zip(places, iterable):
             self.place = p
             yield self.feed(batch)
         self.place = place
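
As a side note, an illustrative sketch (not from the patch) of the six.moves calls adopted above: six.moves.range and six.moves.xrange resolve to the lazy xrange on Python 2 and to range on Python 3, while six.moves.zip resolves to itertools.izip on Python 2 and the built-in zip on Python 3, so the feeding loops stay iterator-based under both interpreters:

    import six

    names = ["x", "y", "label"]              # example feed names, for illustration only
    shapes = [(32, 784), (32, 10), (32, 1)]

    # Lazy pairing on both Python 2 and Python 3.
    for name, shape in six.moves.zip(names, shapes):
        print(name, shape)

    # Lazy integer sequence on both interpreters.
    total = sum(i for i in six.moves.range(5))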

python/paddle/fluid/executor.py

Lines changed: 4 additions & 3 deletions
@@ -14,6 +14,7 @@

 import numpy as np
 import contextlib
+import six
 from .framework import Program, default_main_program, Variable
 from . import core

@@ -211,7 +212,7 @@ def to_name_str(var):
             return var.desc.name()
         elif isinstance(var, str):
             return var
-        elif isinstance(var, str):
+        elif isinstance(var, six.string_types):
             return str(var)
         else:
             raise TypeError(str(var) + " should be Variable or str")
@@ -229,8 +230,8 @@ class Executor(object):
    to feed map and fetch_list. Feed map provides input data for the program. fetch_list provides
    the variables(or names) that user want to get after program run. Note: the executor will run all
    operators in the program but not only the operators dependent by the fetch_list.
-    It store the global variables into the global scope, and create a local scope for the temporary
-    variables. The local scope contents will be discarded after every minibatch forward/backward finished.
+    It store the global variables into the global scope, and create a local scope for the temporary
+    variables. The local scope contents will be discarded after every minibatch forward/backward finished.
    But the global scope variables will be persistent through different runs.
    All of ops in program will be running in sequence.

python/paddle/fluid/framework.py

Lines changed: 4 additions & 4 deletions
@@ -524,12 +524,12 @@ def find_name(var_list, name):
                         % (in_proto.name, len(in_args)))
                 in_arg_names = []
                 for arg in in_args:
-                    if issubclass(arg.__class__, six.string_types):
+                    if isinstance(arg, six.string_types):
                         in_arg_names.append(arg)
                     elif isinstance(arg, six.binary_type):
                         in_arg_names.append(arg.decode())
                     else:
-                        if issubclass(arg.name.__class__, six.string_types):
+                        if isinstance(arg.name, six.string_types):
                             in_arg_names.append(arg.name)
                         elif isinstance(arg.name, six.binary_type):
                             in_arg_names.append(arg.name.decode())
@@ -561,7 +561,7 @@ def find_name(var_list, name):
                         (out_proto.name, len(out_args)))
                 out_arg_names = []
                 for arg in out_args:
-                    if issubclass(arg.name.__class__, six.string_types):
+                    if isinstance(arg.name, six.string_types):
                         out_arg_names.append(arg.name)
                     elif isinstance(arg.name, six.binary_type):
                         out_arg_names.append(arg.name.decode())
@@ -911,7 +911,7 @@ def var(self, name):
         Returns:
             Variable: the Variable with the giving name.
         """
-        if not issubclass(name.__class__, six.string_types):
+        if not isinstance(name, six.string_types):
             if not isinstance(name, six.binary_type):
                 raise TypeError(
                     "var require string as parameter, but get %s instead." %

python/paddle/fluid/graphviz.py

Lines changed: 2 additions & 1 deletion
@@ -14,12 +14,13 @@

 import os
 import random
+import six
 import subprocess
 import logging


 def crepr(v):
-    if type(v) is str or type(v) is str:
+    if isinstance(v, six.string_types):
         return '"%s"' % v
     return str(v)


python/paddle/fluid/io.py

Lines changed: 0 additions & 3 deletions
@@ -612,9 +612,6 @@ def save_inference_model(dirname,
         if not (all(
                 isinstance(name, six.text_type)
                 for name in feeded_var_names)):
-            import sys
-            print([type(name) for name in feeded_var_names])
-            sys.stdout.flush()
             raise ValueError(
                 "'feed_var_names' should be a list of str.")
     else:

python/paddle/fluid/layer_helper.py

Lines changed: 2 additions & 1 deletion
@@ -14,6 +14,7 @@

 import copy
 import itertools
+import six

 from .framework import Variable, Parameter, default_main_program, default_startup_program, dtype_is_floating
 from . import unique_name
@@ -398,7 +399,7 @@ def append_activation(self, input_var):
         act = self.kwargs.get('act', None)
         if act is None:
             return input_var
-        if isinstance(act, str):
+        if isinstance(act, six.string_types):
             act = {'type': act}

         if 'use_cudnn' in self.kwargs and self.kwargs.get('use_cudnn'):

python/paddle/fluid/op.py

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ def get_all_op_protos():


 def is_str(s):
-    return isinstance(s, str) or isinstance(s, str)
+    return isinstance(s, six.string_types)


 class OpDescCreationMethod(object):

python/paddle/fluid/param_attr.py

Lines changed: 3 additions & 1 deletion
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import six
+
 from .initializer import Initializer, Xavier, Constant
 from .regularizer import WeightDecayRegularizer

@@ -134,7 +136,7 @@ def _to_attr(arg):
             return [ParamAttr._to_attr(a) for a in arg]
         elif isinstance(arg, ParamAttr):
             return arg
-        elif isinstance(arg, str) or isinstance(arg, str):
+        elif isinstance(arg, six.string_types):
             return ParamAttr(name=arg)
         elif isinstance(arg, Initializer):
             return ParamAttr(initializer=arg)

python/paddle/fluid/tests/unittests/benchmark.py

Lines changed: 3 additions & 1 deletion
@@ -16,6 +16,7 @@
 import unittest
 import time
 import itertools
+import six

 import paddle.fluid as fluid
 import paddle.fluid.core as core
@@ -40,7 +41,8 @@ def _assert_cpu_gpu_same(self, cpu_outs, gpu_outs, fetch_list, atol):
             expect_t = np.array(item_cpu_out)
             actual = item_gpu_out
             actual_t = np.array(item_gpu_out)
-            var_name = variable if isinstance(variable, str) else variable.name
+            var_name = variable if isinstance(
+                variable, six.string_types) else variable.name
             self.assertTrue(
                 np.allclose(
                     actual_t, expect_t, atol=atol),
