1111
1212
class SimpleNetRNN(tf.keras.models.Model):
    def __init__(self, n_neurons, output_units):
        """Two stacked SimpleRNN layers followed by a Dense prediction head.

        Args:
            n_neurons: number of units in each SimpleRNN layer.
            output_units: width of the final Dense layer, i.e. how many
                future time steps are predicted per input series.
        """
        # Zero-argument super() — equivalent to super(SimpleNetRNN, self)
        # under Python 3, which this file's syntax already requires.
        super().__init__()
        self.n_neurons = n_neurons
        self.output_units = output_units
        self.network = tf.keras.Sequential(
            [
                # input_shape=[None, 1]: variable-length univariate sequences.
                tf.keras.layers.SimpleRNN(
                    self.n_neurons, return_sequences=True, input_shape=[None, 1]
                ),
                # Second RNN returns only the last hidden state.
                tf.keras.layers.SimpleRNN(self.n_neurons),
                # tanh bounds outputs to [-1, 1]; assumes targets are rescaled
                # (a DataScaler is applied in __main__) — TODO confirm.
                tf.keras.layers.Dense(self.output_units, activation="tanh"),
            ]
        )
2627
@@ -30,12 +31,13 @@ def call(self, x):
3031
3132
3233class SimpleNetGRU (tf .keras .models .Model ):
33- def __init__ (self , filters , kernel_size , n_units , strides ):
34+ def __init__ (self , filters , kernel_size , n_units , strides , output_units ):
3435 super (SimpleNetGRU , self ).__init__ ()
3536 self .filters = filters
3637 self .kernel_size = kernel_size
3738 self .n_units = n_units
3839 self .strides = strides
40+ self .output_units = output_units
3941 self .network = tf .keras .Sequential (
4042 [
4143 tf .keras .layers .Conv1D (
@@ -47,7 +49,7 @@ def __init__(self, filters, kernel_size, n_units, strides):
4749 ),
4850 tf .keras .layers .GRU (self .n_units , return_sequences = True ),
4951 tf .keras .layers .GRU (self .n_units ),
50- tf .keras .layers .Dense (1 , activation = "linear" ),
52+ tf .keras .layers .Dense (self . output_units , activation = "linear" ),
5153 ]
5254 )
5355
@@ -76,29 +78,29 @@ def make_predictions(model, input_model, n_steps):
if __name__ == "__main__":
    num_series = 80
    series_size = 100
    # Forecast horizon: number of trailing points held out as the target.
    n_steps = 4  # For SimpleNetRNN use 4

    input_serie = generate_series(num_series, series_size, incline=False)
    # Drop the last n_steps points; they are reserved for evaluation plots.
    y_new = input_serie[:, :-n_steps]
    print(y_new.shape)
    scaler = DataScaler(y_new)
    y_new = scaler.rescale()
    # 80/20 train/test split along the series (row) axis.
    size_ = int(0.8 * y_new.shape[0])
    x_train = y_new[:size_, :-n_steps]
    y_train = y_new[:size_, -n_steps:]
    x_test = y_new[size_:, :-n_steps]
    y_test = y_new[size_:, -n_steps:]

    print(x_train.shape, y_train.shape)

    optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)
    # optimizer = tf.keras.optimizers.SGD(learning_rate=0.01)
    # model = SimpleNetRNN(n_neurons=10, output_units=n_steps)
    model = SimpleNetGRU(
        filters=10, kernel_size=4, n_units=5, strides=2, output_units=n_steps
    )
    # model(x_train[:, :, np.newaxis])
    model.compile(loss="mse", optimizer=optimizer, metrics=["mae"])
    # np.newaxis adds the feature dimension expected by the RNN layers:
    # (batch, time) -> (batch, time, 1).
    history = model.fit(
        x_train[:, :, np.newaxis],
        y_train,
        epochs=200,
        validation_split=0.2,
        callbacks=[PrintDot()],
    )

    hist = pd.DataFrame(history.history)
@@ -111,13 +113,18 @@ def make_predictions(model, input_model, n_steps):
111113 plt .xlabel ("Epoch" )
112114 plt .ylabel ("Loss" )
113115 plt .show ()
114-
115- X = make_predictions (model , x_train [:, :, np .newaxis ], n_steps = n_steps )
116+ n_pred = 8
117+ X = make_predictions (model , x_train [:, :, np .newaxis ], n_steps = n_pred )
116118 print (X .shape )
117119 X = scaler .scale (X )
118120
119121 plt .plot (range (len (input_serie [0 , :])), input_serie [0 , :], "o-" , label = "real value" )
120- plt .plot (range (len (X [0 , :]))[- n_steps * 4 :], X [0 , :][- n_steps * 4 :], "-r" , label = "prediction" )
122+ plt .plot (
123+ range (len (X [0 , :]))[- n_steps * n_pred :],
124+ X [0 , :][- n_steps * n_pred :],
125+ "-r" ,
126+ label = "prediction" ,
127+ )
121128 plt .xlabel ("Time" )
122129 plt .ylabel ("Value" )
123130 plt .legend ()
0 commit comments