Commit c57b81d

Added Python 2 support.
1 parent c9c2d41 commit c57b81d

1 file changed, 12 insertions(+), 12 deletions(-)

pytorch2keras/converter.py

Lines changed: 12 additions & 12 deletions
@@ -53,8 +53,8 @@ def _optimize_graph(graph, aten):
 
 def get_node_id(node):
     import re
-    node_id = re.search(r"[\d]+", node.__str__())[0]
-    return node_id
+    node_id = re.search(r"[\d]+", node.__str__())
+    return node_id.group(0)
 
 
 def pytorch_to_keras(
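
In Python 2 (and Python 3 before 3.6) a regex match object is not subscriptable, so the old `re.search(...)[0]` raises a TypeError; `.group(0)` returns the same full-match string and works on both interpreters. A minimal, self-contained sketch of the patched `get_node_id` (the argument here is just an illustrative node string, not a real torch graph node):

import re

def get_node_id(node):
    # .group(0) returns the whole match on both Python 2 and 3;
    # subscripting the match (match[0]) only works on Python >= 3.6.
    node_id = re.search(r"[\d]+", node.__str__())
    return node_id.group(0)

print(get_node_id("%42 : Float(1, 3, 224, 224)"))  # prints: 42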
@@ -103,11 +103,12 @@ def pytorch_to_keras(
 
     # Collect graph outputs
     graph_outputs = [n.uniqueName() for n in trace.graph().outputs()]
+    print('Graph outputs:', graph_outputs)
 
     # Collect model state dict
     state_dict = _unique_state_dict(model)
     if verbose:
-        print(list(state_dict))
+        print('State dict:', list(state_dict))
 
     import re
     import keras
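
One caveat for the new debug prints: unless converter.py already does `from __future__ import print_function` (not visible in this hunk), a multi-argument `print('Graph outputs:', graph_outputs)` on Python 2 is the print statement applied to a tuple, so it renders with parentheses rather than as two space-separated fields. A small illustration:

# Python 2 without the future import prints the tuple itself:
#   print('Graph outputs:', ['output_0'])  ->  ('Graph outputs:', ['output_0'])
# With the future import, Python 2 output matches Python 3:
from __future__ import print_function
print('Graph outputs:', ['output_0'])      # -> Graph outputs: ['output_0']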
@@ -173,21 +174,20 @@ def pytorch_to_keras(
     for layer in conf['layers']:
         if layer['config'] and 'batch_input_shape' in layer['config']:
             layer['config']['batch_input_shape'] = \
-                tuple(np.reshape(
+                tuple(np.reshape(np.array(
                 [
-                    None,
-                    *layer['config']['batch_input_shape'][2:][:],
-                    layer['config']['batch_input_shape'][1]
-                ], -1
+                    [None] +
+                    list(layer['config']['batch_input_shape'][2:][:]) +
+                    [layer['config']['batch_input_shape'][1]]
+                ]), -1
             ))
-
         if layer['config'] and 'target_shape' in layer['config']:
             layer['config']['target_shape'] = \
-                tuple(np.reshape(
+                tuple(np.reshape(np.array(
                 [
-                    *layer['config']['target_shape'][1:][:],
+                    list(layer['config']['target_shape'][1:][:]),
                     layer['config']['target_shape'][0]
-                ], -1
+                ]), -1
             ))
         if layer['config'] and 'data_format' in layer['config']:
             layer['config']['data_format'] = 'channels_last'
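
The reshaping changes are the actual Python 2 syntax fix: `*`-unpacking inside a list literal (PEP 448) only exists on Python 3.5+, so the old `[None, *shape[2:], shape[1]]` form is a SyntaxError under Python 2. The patch builds the same sequence by list concatenation and wraps it in `np.array` before `np.reshape`. A minimal sketch of the channels-first to channels-last shuffle, with a made-up `batch_input_shape`:

import numpy as np

batch_input_shape = (None, 3, 224, 224)  # hypothetical channels-first Keras input shape

# Python 3.5+ only (PEP 448 unpacking in a list display):
#   permuted = [None, *batch_input_shape[2:], batch_input_shape[1]]
# Python 2 compatible concatenation, as in the patch:
permuted = [None] + list(batch_input_shape[2:]) + [batch_input_shape[1]]

print(tuple(np.reshape(np.array([permuted]), -1)))  # -> (None, 224, 224, 3)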
