Import the required libraries:
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, SimpleRNN
from keras.optimizers import Adam
import numpy as np
Set a random seed so the results are reproducible:
np.random.seed(1337)  # for reproducibility
Set some parameters:
time_steps = 28     # length of the time sequence: each image is read as 28 rows
input_size = 28     # how many pixels are read per row
output_size = 10    # number of output classes (one-hot)
cell_size = 50      # number of units in the hidden RNN layer
lr = 0.001          # learning rate
Load data:
(x_train, y_train), (x_test, y_test) = mnist.load_data()  # load the MNIST dataset
Data preprocessing:
# data preprocessing: normalize the pixels and one-hot encode the labels
x_train = x_train.reshape(-1, 28, 28) / 255.0
x_test = x_test.reshape(-1, 28, 28) / 255.0
y_train = np_utils.to_categorical(y_train, num_classes=10)
y_test = np_utils.to_categorical(y_test, num_classes=10)
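As a quick sanity check (assuming the standard 60,000/10,000 MNIST split), the preprocessed arrays should have the following shapes:
# sanity check on the preprocessed data (shapes assume the standard MNIST split)
print(x_train.shape)  # (60000, 28, 28): samples x time steps x features per step
print(x_test.shape)   # (10000, 28, 28)
print(y_train.shape)  # (60000, 10): one-hot labels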
Create the model:
# build the model
model = Sequential()
RNN:
# RNN layer
model.add(SimpleRNN(
    cell_size,
    input_shape=(time_steps, input_size),  # or: input_dim=input_size, input_length=time_steps
))
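As the comment notes, the input can also be described with input_dim and input_length instead of input_shape. A sketch of that equivalent form (use it instead of, not in addition to, the call above):
# equivalent SimpleRNN definition using input_dim/input_length (alternative to input_shape)
model.add(SimpleRNN(
    cell_size,
    input_dim=input_size,     # features per time step
    input_length=time_steps,  # number of time steps
))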
Output layer:
model.add(Dense(output_size))
model.add(Activation('softmax'))
Optimizer:
# optimizer
adam = Adam(lr)
Print the model's parameter information:
model.summary()
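For reference, the parameter counts that model.summary() reports can be checked by hand: the SimpleRNN layer holds an input kernel, a recurrent kernel, and a bias, while the Dense layer holds a weight matrix and a bias. A small sketch of that arithmetic:
# expected trainable parameter counts (standard formulas for SimpleRNN and Dense)
rnn_params = input_size * cell_size + cell_size * cell_size + cell_size  # 28*50 + 50*50 + 50 = 3950
dense_params = cell_size * output_size + output_size                     # 50*10 + 10 = 510
print(rnn_params + dense_params)                                         # 4460 in total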
Compile the model:
# compile the model with the Adam optimizer defined above
model.compile(optimizer=adam, loss='categorical_crossentropy', metrics=['accuracy'])
Training:
model.fit(x_train, y_train, epochs=5, batch_size=32)
Evaluation:
loss, accuracy = model.evaluate(x_test, y_test)
print (loss)
print (accuracy)
Results:
9888/10000 [===========================>..] - ETA: 0s
0.188649062154
0.9443
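Once trained, the model can also classify individual test images. A minimal sketch (the sample index 0 is arbitrary):
# predict the class of a single test image
probs = model.predict(x_test[0:1])             # shape (1, 10): one probability per digit class
predicted_digit = np.argmax(probs, axis=1)[0]
true_digit = np.argmax(y_test[0])              # recover the label from its one-hot encoding
print(predicted_digit, true_digit)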