
Update of SVD model

jbuisine, 5 years ago
Parent commit: a900315b27
2 changed files with 27 additions and 7 deletions
  1. README.md (+2 -2)
  2. classification_cnn_keras_svd.py (+25 -5)

README.md (+2 -2)

@@ -17,8 +17,8 @@ It will split scenes and generate all data you need for your neural network.
 You can specify the number of sub images you want in the script by modifying **_NUMBER_SUB_IMAGES_** variables.
 
 There are 3 kinds of Neural Networks :
-- **classification_cnn_keras.py** : *based croped on images*
-- **classification_cnn_keras_crossentropy.py** : *based croped on images which are randomly split for training*
+- **classification_cnn_keras.py** : *based on cropped images and do convolution*
+- **classification_cnn_keras_cross_validation.py** : *based on cropped images and do convolution. Data are randomly split for training*
 - **classification_cnn_keras_svd.py** : *based on svd metrics of image*
 
 Note that the image input size need to change in you used specific size for your croped images.
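The input-size note above boils down to the shape the first layer expects. Below is a minimal, hypothetical sketch of the usual Keras pattern for the crop-based scripts; the repository's actual variable names are not visible in this diff, so img_width and img_height are assumptions.

# Hypothetical illustration only: in Keras the crop size is usually wired
# into the first layer's input_shape, so it must match the sub-images
# produced by the generation script. Names img_width / img_height are assumed.
from keras.models import Sequential
from keras.layers import Conv2D

img_width, img_height = 32, 32   # set to the size of your cropped sub-images

model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=(img_width, img_height, 3)))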

classification_cnn_keras_svd.py (+25 -5)

@@ -80,24 +80,44 @@ def generate_model():
     model.add(MaxPooling2D(pool_size=(2, 1)))
 
     model.add(Flatten())
+    model.add(Dense(50, kernel_regularizer=l2(0.01)))
+    model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dense(300, kernel_regularizer=l2(0.01)))
+    model.add(Dropout(0.1))
+
+    model.add(Dense(100, kernel_regularizer=l2(0.01)))
     model.add(Activation('relu'))
-    model.add(Dropout(0.4))
+    model.add(BatchNormalization())
+    model.add(Dropout(0.1))
 
-    model.add(Dense(30, kernel_regularizer=l2(0.01)))
+    model.add(Dense(200, kernel_regularizer=l2(0.01)))
+    model.add(Activation('relu'))
     model.add(BatchNormalization())
+    model.add(Dropout(0.2))
+
+    model.add(Dense(300, kernel_regularizer=l2(0.01)))
     model.add(Activation('relu'))
+    model.add(BatchNormalization())
     model.add(Dropout(0.3))
 
+    model.add(Dense(200, kernel_regularizer=l2(0.01)))
+    model.add(Activation('relu'))
+    model.add(BatchNormalization())
+    model.add(Dropout(0.2))
+
     model.add(Dense(100, kernel_regularizer=l2(0.01)))
+    model.add(Activation('relu'))
     model.add(BatchNormalization())
+    model.add(Dropout(0.1))
+
+    model.add(Dense(50, kernel_regularizer=l2(0.01)))
     model.add(Activation('relu'))
-    model.add(Dropout(0.2))
+    model.add(BatchNormalization())
+    model.add(Dropout(0.1))
 
     model.add(Dense(20, kernel_regularizer=l2(0.01)))
-    model.add(BatchNormalization())
     model.add(Activation('relu'))
+    model.add(BatchNormalization())
     model.add(Dropout(0.1))
 
     model.add(Dense(1))
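Taken together, the hunk replaces the previous 300-30-100-20 dense stack with a deeper, symmetric 50-100-200-300-200-100-50-20 pyramid and places BatchNormalization after the ReLU activation in every block. The following is a consolidated sketch of that dense tail as it reads after this commit, reconstructed from the diff above; the imports and the convolutional front end are assumed to match the rest of classification_cnn_keras_svd.py.

# Sketch of the dense part of generate_model() after this commit,
# reconstructed from the hunk above. The convolutional front end and the
# compile/training code are assumed to be unchanged elsewhere in the script.
from keras.layers import Dense, Activation, BatchNormalization, Dropout, Flatten
from keras.regularizers import l2

def add_dense_stack(model):
    model.add(Flatten())
    # Each block: Dense with l2 weight decay, ReLU, BatchNorm, then Dropout.
    for units, rate in [(50, 0.1), (100, 0.1), (200, 0.2), (300, 0.3),
                        (200, 0.2), (100, 0.1), (50, 0.1), (20, 0.1)]:
        model.add(Dense(units, kernel_regularizer=l2(0.01)))
        model.add(Activation('relu'))
        model.add(BatchNormalization())
        model.add(Dropout(rate))
    model.add(Dense(1))   # single output unit, as in the diff
    return model

Dropout now scales with layer width (0.1 for the 50- and 100-unit blocks up to 0.3 for the 300-unit block), and the heaviest rate drops from 0.4 to 0.3.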