# Import related libraries
import os
import random

import h5py
import numpy as np
from PIL import Image
# file configuration
# File configuration: three parallel lists indexed by split,
# index 0 = training split, index 1 = test split.
image_dir = ['image_train', 'image_test']     # directories holding the images
hdf5_file = ['hdf5_train.h5', 'hdf5_test.h5'] # HDF5 files to generate
list_file = ['list_train.txt', 'list_test.txt'] # list files Caffe's HDF5Data layer reads
# label configuration
# Label configuration: class name -> (kind_1, kind_2) label pair.
# kind_1 distinguishes the 'a'/'b' family (0/1); kind_2 is the subclass index.
# NOTE(review): key casing was garbled in the original scrape -- lowercase
# assumed here; confirm against how kind_index is derived from the file names.
LABELS = dict(
    # (kind_1, kind_2)
    a_0=(0, 0),
    b_0=(1, 0),
    a_1=(0, 1),
    b_1=(1, 1),
    a_2=(0, 2),
    b_2=(1, 2),
)
# Generate one HDF5 file (and its list file) per split
print('\nplease wait ...')
for kk, img_dir in enumerate(image_dir):
    # Read the list of image paths for this split.
    # TODO: populate from img_dir (e.g. a glob of the image files) --
    # elided in the original tutorial.
    file_list = ...  # list of image file paths
    # Shuffle so samples are not grouped by class in the HDF5 file.
    random.shuffle(file_list)
    # TODO: derive the LABELS key for each image (e.g. from its file name) --
    # also elided in the original tutorial.
    kind_index = ...
    # Images are 96*32, single channel -> Caffe's (N, C, H, W) layout.
    # NOTE(review): assuming H=32, W=96 -- confirm against the actual data.
    datas = np.zeros((len(file_list), 1, 32, 96))
    # One (kind_1, kind_2) label pair per image; shape must be a tuple
    # (the original passed 2 as the dtype argument by mistake).
    labels = np.zeros((len(file_list), 2))
    for ii, _file in enumerate(file_list):
        # HDF5 data must be float or double for Caffe, and the
        # HDF5Data layer does not allow transform_param, so the
        # [0, 255] -> [0, 1] scaling is done manually here.
        datas[ii, :, :, :] = \
            np.array(Image.open(_file)).astype(np.float32) / 255
        labels[ii, :] = np.array(LABELS[kind_index]).astype(int)
    # Write the HDF5 file; the 'with' block closes it automatically,
    # so no explicit f.close() is needed.
    with h5py.File(hdf5_file[kk], 'w') as f:
        f['data'] = datas
        f['labels'] = labels
    # Write the list file; it may name more than one HDF5 file,
    # which is how Caffe works around the per-file size limit.
    with open(list_file[kk], 'w') as f:
        f.write(os.path.abspath(hdf5_file[kk]) + '\n')
print('\ndone ...')
Note:
Caffe requires that a single HDF5 file be no larger than 2 GB, so if the dataset is large it is recommended to generate multiple HDF5 files. I used 50,000 images totaling about 30 MB, and the resulting HDF5 file was 1.8 GB.