mcfly generate_models gives NotImplementedError #15

@sverhoeven

Description

I tried to run the notebooks/TS_dataset_generation_benchmarking1.ipynb notebook. However, when running the cell

num_classes = y_train_binary.shape[1]

models = modelgen.generate_models(np.swapaxes(X_train,1,2).shape,
                                  number_of_classes=num_classes,
                                  number_of_models = 12)
I got a `NotImplementedError: Cannot convert a symbolic Tensor (lstm/strided_slice:0) to a numpy array. This error may indicate that you're trying to pass a Tensor to a NumPy call, which is not supported`:
---------------------------------------------------------------------------
NotImplementedError                       Traceback (most recent call last)
<ipython-input-20-2407be9b3991> in <module>
      1 num_classes = y_train_binary.shape[1]
      2 
----> 3 models = modelgen.generate_models(np.swapaxes(X_train,1,2).shape,
      4                                   number_of_classes=num_classes,
      5                                   number_of_models = 12)

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/mcfly/modelgen.py in generate_models(x_shape, number_of_classes, number_of_models, model_types, metrics, **hyperparameter_ranges)
    116 
    117         hyperparameters = model_type.generate_hyperparameters()
--> 118         model = model_type.create_model(**hyperparameters)
    119         model_name = model_type.model_name
    120 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/mcfly/models/deep_conv_lstm.py in create_model(self, filters, lstm_dims, learning_rate, regularization_rate)
    147 
    148         for lstm_dim in lstm_dims:
--> 149             model.add(LSTM(units=lstm_dim, return_sequences=True,
    150                            activation='tanh'))
    151 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
    515     self._self_setattr_tracking = False  # pylint: disable=protected-access
    516     try:
--> 517       result = method(self, *args, **kwargs)
    518     finally:
    519       self._self_setattr_tracking = previous_value  # pylint: disable=protected-access

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/engine/sequential.py in add(self, layer)
    221       # If the model is being built continuously on top of an input layer:
    222       # refresh its output.
--> 223       output_tensor = layer(self.outputs[0])
    224       if len(nest.flatten(output_tensor)) != 1:
    225         raise ValueError(SINGLE_LAYER_OUTPUT_ERROR_MSG)

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in __call__(self, inputs, initial_state, constants, **kwargs)
    658 
    659     if initial_state is None and constants is None:
--> 660       return super(RNN, self).__call__(inputs, **kwargs)
    661 
    662     # If any of `initial_state` or `constants` are specified and are Keras

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
    949     # >> model = tf.keras.Model(inputs, outputs)
    950     if _in_functional_construction_mode(self, inputs, args, kwargs, input_list):
--> 951       return self._functional_construction_call(inputs, args, kwargs,
    952                                                 input_list)
    953 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/engine/base_layer.py in _functional_construction_call(self, inputs, args, kwargs, input_list)
   1088           layer=self, inputs=inputs, build_graph=True, training=training_value):
   1089         # Check input assumptions set after layer building, e.g. input shape.
-> 1090         outputs = self._keras_tensor_symbolic_call(
   1091             inputs, input_masks, args, kwargs)
   1092 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/engine/base_layer.py in _keras_tensor_symbolic_call(self, inputs, input_masks, args, kwargs)
    820       return nest.map_structure(keras_tensor.KerasTensor, output_signature)
    821     else:
--> 822       return self._infer_output_signature(inputs, args, kwargs, input_masks)
    823 
    824   def _infer_output_signature(self, inputs, args, kwargs, input_masks):

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/engine/base_layer.py in _infer_output_signature(self, inputs, args, kwargs, input_masks)
    861           # TODO(kaftan): do we maybe_build here, or have we already done it?
    862           self._maybe_build(inputs)
--> 863           outputs = call_fn(inputs, *args, **kwargs)
    864 
    865         self._handle_activity_regularization(inputs, outputs)

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent_v2.py in call(self, inputs, mask, training, initial_state)
   1155 
   1156     # LSTM does not support constants. Ignore it during process.
-> 1157     inputs, initial_state, _ = self._process_inputs(inputs, initial_state, None)
   1158 
   1159     if isinstance(mask, list):

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in _process_inputs(self, inputs, initial_state, constants)
    857         initial_state = self.states
    858     elif initial_state is None:
--> 859       initial_state = self.get_initial_state(inputs)
    860 
    861     if len(initial_state) != len(self.states):

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in get_initial_state(self, inputs)
    640     dtype = inputs.dtype
    641     if get_initial_state_fn:
--> 642       init_state = get_initial_state_fn(
    643           inputs=None, batch_size=batch_size, dtype=dtype)
    644     else:

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in get_initial_state(self, inputs, batch_size, dtype)
   2504 
   2505   def get_initial_state(self, inputs=None, batch_size=None, dtype=None):
-> 2506     return list(_generate_zero_filled_state_for_cell(
   2507         self, inputs, batch_size, dtype))
   2508 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in _generate_zero_filled_state_for_cell(cell, inputs, batch_size, dtype)
   2985     batch_size = array_ops.shape(inputs)[0]
   2986     dtype = inputs.dtype
-> 2987   return _generate_zero_filled_state(batch_size, cell.state_size, dtype)
   2988 
   2989 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in _generate_zero_filled_state(batch_size_tensor, state_size, dtype)
   3001 
   3002   if nest.is_nested(state_size):
-> 3003     return nest.map_structure(create_zeros, state_size)
   3004   else:
   3005     return create_zeros(state_size)

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/util/nest.py in map_structure(func, *structure, **kwargs)
    657 
    658   return pack_sequence_as(
--> 659       structure[0], [func(*x) for x in entries],
    660       expand_composites=expand_composites)
    661 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/util/nest.py in <listcomp>(.0)
    657 
    658   return pack_sequence_as(
--> 659       structure[0], [func(*x) for x in entries],
    660       expand_composites=expand_composites)
    661 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/keras/layers/recurrent.py in create_zeros(unnested_state_size)
   2998     flat_dims = tensor_shape.TensorShape(unnested_state_size).as_list()
   2999     init_state_size = [batch_size_tensor] + flat_dims
-> 3000     return array_ops.zeros(init_state_size, dtype=dtype)
   3001 
   3002   if nest.is_nested(state_size):

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py in wrapper(*args, **kwargs)
    199     """Call target, and fall back on dispatchers if there is a TypeError."""
    200     try:
--> 201       return target(*args, **kwargs)
    202     except (TypeError, ValueError):
    203       # Note: convert_to_eager_tensor currently raises a ValueError, not a

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/ops/array_ops.py in wrapped(*args, **kwargs)
   2817 
   2818   def wrapped(*args, **kwargs):
-> 2819     tensor = fun(*args, **kwargs)
   2820     tensor._is_zeros_tensor = True
   2821     return tensor

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/ops/array_ops.py in zeros(shape, dtype, name)
   2866           # Create a constant if it won't be very big. Otherwise create a fill
   2867           # op to prevent serialized GraphDefs from becoming too large.
-> 2868           output = _constant_if_small(zero, shape, dtype, name)
   2869           if output is not None:
   2870             return output

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/ops/array_ops.py in _constant_if_small(value, shape, dtype, name)
   2802 def _constant_if_small(value, shape, dtype, name):
   2803   try:
-> 2804     if np.prod(shape) < 1000:
   2805       return constant(value, shape=shape, dtype=dtype, name=name)
   2806   except TypeError:

<__array_function__ internals> in prod(*args, **kwargs)

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/numpy/core/fromnumeric.py in prod(a, axis, dtype, out, keepdims, initial, where)
   3028     10
   3029     """
-> 3030     return _wrapreduction(a, np.multiply, 'prod', axis, dtype, out,
   3031                           keepdims=keepdims, initial=initial, where=where)
   3032 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/numpy/core/fromnumeric.py in _wrapreduction(obj, ufunc, method, axis, dtype, out, **kwargs)
     85                 return reduction(axis=axis, out=out, **passkwargs)
     86 
---> 87     return ufunc.reduce(obj, axis, dtype, out, **passkwargs)
     88 
     89 

~/git/epodium/time_series_generator/env/lib/python3.8/site-packages/tensorflow/python/framework/ops.py in __array__(self)
    850 
    851   def __array__(self):
--> 852     raise NotImplementedError(
    853         "Cannot convert a symbolic Tensor ({}) to a numpy array."
    854         " This error may indicate that you're trying to pass a Tensor to"

NotImplementedError: Cannot convert a symbolic Tensor (lstm/strided_slice:0) to a numpy array. This error may indicate that you're trying to pass a Tensor to a NumPy call, which is not supported
I installed mcfly==3.1.0 with pip.
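
The bottom of the traceback shows NumPy's `np.prod` being applied to a shape that contains a symbolic Tensor, which matches the known incompatibility between TensorFlow 2.3/2.4 and NumPy >= 1.20 rather than something specific to mcfly. Whether that combination is actually installed here is an assumption, since the NumPy and TensorFlow versions are not listed in this report. The sketch below is a hypothetical check and a minimal reproduction outside mcfly; the layer size and input shape are arbitrary placeholders.

```python
# Hypothetical check/repro sketch, assuming the cause is the TensorFlow <= 2.4
# vs. NumPy >= 1.20 incompatibility (not confirmed from this report alone).
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import InputLayer, LSTM

print("numpy:", np.__version__)        # >= 1.20 is the suspected trigger
print("tensorflow:", tf.__version__)   # 2.3 / 2.4 are the releases commonly affected

# The same error should reproduce without mcfly by stacking an LSTM on top of
# an input layer, which is the path deep_conv_lstm.create_model takes above.
# The (timesteps, channels) shape below is an arbitrary placeholder.
model = Sequential()
model.add(InputLayer(input_shape=(100, 12)))
model.add(LSTM(units=32, return_sequences=True, activation='tanh'))
```

If those versions are indeed installed, pinning `numpy<1.20` or moving to a TensorFlow release that supports NumPy 1.20 has been reported elsewhere to make this error go away; treat that as a suggestion to verify, not a confirmed fix for mcfly 3.1.0.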
