-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathHW4 P2.py
More file actions
94 lines (64 loc) · 2.17 KB
/
HW4 P2.py
File metadata and controls
94 lines (64 loc) · 2.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.models import Sequential
from keras.layers import Dense, Activation, SimpleRNN, LSTM
from keras.utils import to_categorical, plot_model
from keras.datasets import mnist
import numpy as np
import random
# Load the sequence dataset from disk.
# Assumes shape (n_sequences, 500) with at least 100 rows — TODO confirm
# against how data.npy is generated.
data = np.load('data.npy')
print(np.shape(data))

# Next-step prediction setup: inputs are timesteps 0..498 and targets are
# the same series shifted one step ahead (timesteps 1..499).
x = data[:, 0:499]
y = data[:, 1:500]

# Train on the first 66 sequences, test on sequences 66..99.
x_train = x[0:66, :]
y_train = y[0:66, :]
x_test = x[66:100, :]
y_test = y[66:100, :]

# Number of distinct target values (not used below; kept for reference).
num_labels = len(np.unique(y_train))

# Reshape to (samples, timesteps, features) as Keras RNN layers expect.
x_train = x_train.reshape(-1, 499, 1)
x_test = x_test.reshape(-1, 499, 1)
y_train = y_train.reshape(-1, 499, 1)
y_test = y_test.reshape(-1, 499, 1)
# Training hyperparameters.
batch_size = 20
dropout = 0.2

# Sequence-to-sequence regression network: two stacked SimpleRNN layers
# (20 then 29 units) returning the full output sequence, followed by a
# linear Dense head producing one value per timestep. Input is a
# 499-timestep, 1-feature sequence.
model = Sequential([
    SimpleRNN(units=20,
              dropout=dropout,
              input_shape=(499, 1),
              return_sequences=True,
              activation='relu'),
    SimpleRNN(units=29,
              dropout=dropout,
              return_sequences=True,
              activation='relu'),
    Dense(1, activation=None),
])
model.summary()
# Mean-squared-error loss: this is a regression task (predicting the next
# real value at each timestep), not classification, so MSE — also tracked
# as the metric — is the appropriate measure. Plain SGD optimizer.
model.compile(loss='mse',
optimizer='sgd',
metrics=['mean_squared_error'])
# Train for 20 epochs, validating against the held-out test sequences
# after each epoch (history records both training and validation curves).
history = model.fit(x_train, y_train, epochs=20, batch_size=batch_size,validation_data=(x_test, y_test), verbose=2)
# Fix: matplotlib was never imported anywhere in the file, so every `plt.`
# call below raised NameError. Import it here at script level.
import matplotlib.pyplot as plt

# Plot training MSE (red) vs. validation loss (blue) per epoch. Since the
# loss IS mse, 'val_loss' equals the validation MSE, so the curves are
# comparable. NOTE(review): history keys like 'mean_squared_error' vary by
# Keras version (newer versions use 'mse') — verify against the installed
# Keras.
plt.plot(history.history['mean_squared_error'], "r")
plt.plot(history.history['val_loss'], "b")
plt.title('Model MSE')
plt.ylabel('MSE')
plt.xlabel('Epoch')
plt.legend(['Training', "Test"], loc='upper left')
plt.show()
# Evaluate the trained model on the test set; with the compile settings
# above this returns [loss, mean_squared_error].
Test = model.evaluate(x_test, y_test, batch_size=batch_size)
# Fix: the model tracks MSE, not accuracy — the original label
# 'Test accuracy:' misreported what Test[1] actually is.
print('\n', 'Test MSE:', Test[1])