Keras has many advantages — building a model is quick and easy — but it is still recommended to understand the basic principles of neural networks first.
The TensorFlow backend is recommended; it is much faster than Theano.
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
import keras
from keras.models import Sequential, load_model
from keras.layers import Dense, Dropout
from keras.optimizers import SGD
import matplotlib.pyplot as plt


class LossHistory(keras.callbacks.Callback):
    """Keras callback that records loss/accuracy per batch and per epoch
    and can plot the resulting curves."""

    def on_train_begin(self, logs={}):
        # One history list per granularity ('batch' / 'epoch') for each metric.
        self.losses = {'batch': [], 'epoch': []}
        self.accuracy = {'batch': [], 'epoch': []}
        self.val_loss = {'batch': [], 'epoch': []}
        self.val_acc = {'batch': [], 'epoch': []}

    def on_batch_end(self, batch, logs={}):
        # val_loss/val_acc are normally absent at batch level;
        # logs.get(...) then appends None, which matplotlib skips.
        self.losses['batch'].append(logs.get('loss'))
        self.accuracy['batch'].append(logs.get('acc'))
        self.val_loss['batch'].append(logs.get('val_loss'))
        self.val_acc['batch'].append(logs.get('val_acc'))

    def on_epoch_end(self, epoch, logs={}):
        self.losses['epoch'].append(logs.get('loss'))
        self.accuracy['epoch'].append(logs.get('acc'))
        self.val_loss['epoch'].append(logs.get('val_loss'))
        self.val_acc['epoch'].append(logs.get('val_acc'))

    def loss_plot(self, loss_type):
        """Plot the recorded curves; loss_type is 'batch' or 'epoch'."""
        iters = range(len(self.losses[loss_type]))
        plt.figure()
        # Training-set accuracy and loss.
        plt.plot(iters, self.accuracy[loss_type], 'r', label='train acc')
        plt.plot(iters, self.losses[loss_type], 'g', label='train loss')
        if loss_type == 'epoch':
            # Validation metrics only exist at epoch granularity.
            plt.plot(iters, self.val_acc[loss_type], 'b', label='val acc')
            plt.plot(iters, self.val_loss[loss_type], 'k', label='val loss')
        plt.grid(True)
        plt.xlabel(loss_type)
        plt.ylabel('acc-loss')
        plt.legend(loc="upper left")
        plt.show()


# --- Data preparation -------------------------------------------------------
data = load_iris()
# print(data)
# print(type(data))
x = data['data']      # (150, 4) feature matrix
# print(x[1])
y = data['target']    # integer class labels 0..2

# Train/test split; random_state is the RNG seed, so the split is reproducible.
x_train, x_test, y_init_train, y_init_test = train_test_split(
    x, y, test_size=0.2, random_state=1)

# Peek at the first test sample.
print(x_test[:1])
print(y_init_test[:1])
print(x_train.shape)

# One-hot encode the integer labels for categorical cross-entropy.
y_train = keras.utils.to_categorical(y_init_train, num_classes=3)
print(y_train.shape)
y_test = keras.utils.to_categorical(y_init_test, num_classes=3)
print(y_test[:1])

# --- Model: MLP (multi-layer perceptron), softmax multi-class output --------
model = Sequential()
# Dense() is a fully-connected layer. The first layer must declare the
# expected input shape: here, 4-dimensional iris feature vectors.
# NOTE(review): the hidden-layer widths were lost in transcription;
# 64 is a reasonable default for this tutorial — confirm against the source.
model.add(Dense(64, activation='relu', input_dim=4))
# Dropout (random deactivation), often used in image recognition to
# prevent overfitting; disabled in this example.
# model.add(Dropout(0.2))
model.add(Dense(64, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(3, activation='softmax'))

# SGD (stochastic gradient descent):
#   lr       — learning rate
#   momentum — momentum term
#   decay    — learning-rate decay factor (applied each update)
#   nesterov — whether to use Nesterov momentum
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)

# Compile the model.
model.compile(loss='categorical_crossentropy', optimizer=sgd,
              metrics=['accuracy'])

# Create a callback instance and train.
history = LossHistory()
model.fit(x_train, y_train,
          epochs=30,           # iterate over the full data set 30 times
          batch_size=128,
          validation_data=(x_test, y_test),
          callbacks=[history])

# Save / reload the trained model if desired.
# model.save('iris.h5')
# model = load_model('iris.h5')

# verbose=0 suppresses the progress bar (the default, verbose=1, prints it).
score = model.evaluate(x_test, y_test, verbose=0, batch_size=128)
print('Test loss:', score[0])
print('Test accuracy:', score[1])

# p_pred = model.predict(x_test)
# print("p_pred:\n", p_pred)
label_pred = model.predict_classes(x_test, verbose=0)
print("label_pred4test:\n", label_pred)
print("label_init4test:\n", y_init_test)
label_pred4train = model.predict_classes(x_train, verbose=0)
print("label_pred4train:\n", label_pred4train)
print("label_init4train:\n", y_init_train)

# Draw the acc-loss curve over epochs.
history.loss_plot('epoch')
Output results
Acc-loss curve
Reference Links:
Keras Chinese Documents
Keras Drawing acc and loss graphs