Browse Source

First CNN model version

jbuisine 2 years ago
commit
a22b32b71a
100 changed files with 181 additions and 0 deletions
  1. 2 0
      .gitignore
  2. 23 0
      README.md
  3. 11 0
      TODO.md
  4. 103 0
      classification_cnn_keras.py
  5. 42 0
      generate_dataset.py
  6. BIN
      img_train/final/appartAopt_00850.png
  7. BIN
      img_train/final/appartAopt_00860.png
  8. BIN
      img_train/final/appartAopt_00870.png
  9. BIN
      img_train/final/appartAopt_00880.png
  10. BIN
      img_train/final/appartAopt_00890.png
  11. BIN
      img_train/final/appartAopt_00900.png
  12. BIN
      img_train/final/bureau1_9700.png
  13. BIN
      img_train/final/bureau1_9750.png
  14. BIN
      img_train/final/bureau1_9800.png
  15. BIN
      img_train/final/bureau1_9850.png
  16. BIN
      img_train/final/bureau1_9900.png
  17. BIN
      img_train/final/bureau1_9950.png
  18. BIN
      img_train/final/cendrierIUT2_01180.png
  19. BIN
      img_train/final/cendrierIUT2_01240.png
  20. BIN
      img_train/final/cendrierIUT2_01300.png
  21. BIN
      img_train/final/cendrierIUT2_01360.png
  22. BIN
      img_train/final/cendrierIUT2_01420.png
  23. BIN
      img_train/final/cendrierIUT2_01480.png
  24. BIN
      img_train/final/cuisine01_01150.png
  25. BIN
      img_train/final/cuisine01_01160.png
  26. BIN
      img_train/final/cuisine01_01170.png
  27. BIN
      img_train/final/cuisine01_01180.png
  28. BIN
      img_train/final/cuisine01_01190.png
  29. BIN
      img_train/final/cuisine01_01200.png
  30. BIN
      img_train/final/echecs09750.png
  31. BIN
      img_train/final/echecs09800.png
  32. BIN
      img_train/final/echecs09850.png
  33. BIN
      img_train/final/echecs09900.png
  34. BIN
      img_train/final/echecs09950.png
  35. BIN
      img_train/final/echecs10000.png
  36. BIN
      img_train/final/pnd_39750.png
  37. BIN
      img_train/final/pnd_39800.png
  38. BIN
      img_train/final/pnd_39850.png
  39. BIN
      img_train/final/pnd_39900.png
  40. BIN
      img_train/final/pnd_39950.png
  41. BIN
      img_train/final/pnd_40000.png
  42. BIN
      img_train/noisy/appartAopt_00070.png
  43. BIN
      img_train/noisy/appartAopt_00080.png
  44. BIN
      img_train/noisy/appartAopt_00090.png
  45. BIN
      img_train/noisy/appartAopt_00100.png
  46. BIN
      img_train/noisy/appartAopt_00110.png
  47. BIN
      img_train/noisy/appartAopt_00120.png
  48. BIN
      img_train/noisy/bureau1_100.png
  49. BIN
      img_train/noisy/bureau1_1000.png
  50. BIN
      img_train/noisy/bureau1_1050.png
  51. BIN
      img_train/noisy/bureau1_1100.png
  52. BIN
      img_train/noisy/bureau1_1150.png
  53. BIN
      img_train/noisy/bureau1_1250.png
  54. BIN
      img_train/noisy/cendrierIUT2_00040.png
  55. BIN
      img_train/noisy/cendrierIUT2_00100.png
  56. BIN
      img_train/noisy/cendrierIUT2_00160.png
  57. BIN
      img_train/noisy/cendrierIUT2_00220.png
  58. BIN
      img_train/noisy/cendrierIUT2_00280.png
  59. BIN
      img_train/noisy/cendrierIUT2_00340.png
  60. BIN
      img_train/noisy/cuisine01_00050.png
  61. BIN
      img_train/noisy/cuisine01_00060.png
  62. BIN
      img_train/noisy/cuisine01_00070.png
  63. BIN
      img_train/noisy/cuisine01_00080.png
  64. BIN
      img_train/noisy/cuisine01_00090.png
  65. BIN
      img_train/noisy/cuisine01_00100.png
  66. BIN
      img_train/noisy/echecs00050.png
  67. BIN
      img_train/noisy/echecs00100.png
  68. BIN
      img_train/noisy/echecs00150.png
  69. BIN
      img_train/noisy/echecs00200.png
  70. BIN
      img_train/noisy/echecs00250.png
  71. BIN
      img_train/noisy/echecs00300.png
  72. BIN
      img_train/noisy/pnd_100.png
  73. BIN
      img_train/noisy/pnd_1000.png
  74. BIN
      img_train/noisy/pnd_1050.png
  75. BIN
      img_train/noisy/pnd_1150.png
  76. BIN
      img_train/noisy/pnd_1200.png
  77. BIN
      img_train/noisy/pnd_1300.png
  78. BIN
      img_validation/final/SdB2_00900.png
  79. BIN
      img_validation/final/SdB2_00910.png
  80. BIN
      img_validation/final/SdB2_00920.png
  81. BIN
      img_validation/final/SdB2_00930.png
  82. BIN
      img_validation/final/SdB2_00940.png
  83. BIN
      img_validation/final/SdB2_00950.png
  84. BIN
      img_validation/final/SdB2_D_00900.png
  85. BIN
      img_validation/final/SdB2_D_00910.png
  86. BIN
      img_validation/final/SdB2_D_00920.png
  87. BIN
      img_validation/final/SdB2_D_00930.png
  88. BIN
      img_validation/final/SdB2_D_00940.png
  89. BIN
      img_validation/final/SdB2_D_00950.png
  90. BIN
      img_validation/final/selles_envir02850.png
  91. BIN
      img_validation/final/selles_envir02900.png
  92. BIN
      img_validation/final/selles_envir02950.png
  93. BIN
      img_validation/final/selles_envir03000.png
  94. BIN
      img_validation/final/selles_envir03050.png
  95. BIN
      img_validation/final/selles_envir03100.png
  96. BIN
      img_validation/noisy/SdB2_00020.png
  97. BIN
      img_validation/noisy/SdB2_00030.png
  98. BIN
      img_validation/noisy/SdB2_00040.png
  99. BIN
      img_validation/noisy/SdB2_00050.png
  100. 0 0
      img_validation/noisy/SdB2_00060.png

+ 2 - 0
.gitignore

@@ -0,0 +1,2 @@
+# project data
+data

+ 23 - 0
README.md

@@ -0,0 +1,23 @@
+# Noise detection project
+
+## Requirements
+
+```
+pip install -r requirements.txt
+```
+
+## How to use
+
+Generate the dataset (run it only once):
+```
+python generate_dataset.py
+```
+
+It will split the scene images into sub-images and generate all the data you need for your neural network.
+You can specify the number of sub-images you want by modifying the NUMBER_SUB_IMAGES variable in the script.
+
+
+After you have built your neural network in classification_cnn_keras.py, you just have to run it:
+```
+python classification_cnn_keras.py
+```

+ 11 - 0
TODO.md

@@ -0,0 +1,11 @@
+# 1. Create database 
+    - 6 scenes for train
+    - 3 scenes for validation
+    - Balance the noisy / final classes
+
+# 2. Test CNN (check if size is correct)
+
+# 3. Results 
+    - noise_classification_32_16_16_32.h5 : 81.15%
+    - noise_classification_64_32_32_64.h5 : loss: 0.4416 - acc: 0.7993 - val_loss: 0.9338 - val_acc: 0.6943
+

+ 103 - 0
classification_cnn_keras.py

@@ -0,0 +1,103 @@
'''This script goes along the blog post
"Building powerful image classification models using very little data"
from blog.keras.io.

Trains a small binary CNN that classifies 20x20 image patches as
"final" (fully rendered) or "noisy" (under-sampled render).

Expected data layout (produced by generate_dataset.py):
```
data/
    train/
        final/
            final001.png
            final002.png
            ...
        noisy/
            noisy001.png
            noisy002.png
            ...
    validation/
        final/
            final001.png
            final002.png
            ...
        noisy/
            noisy001.png
            noisy002.png
            ...
```
'''

from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K


# dimensions of our images (the sub-images sliced by generate_dataset.py).
img_width, img_height = 20, 20

train_data_dir = 'data/train'
validation_data_dir = 'data/validation'
nb_train_samples = 115200
nb_validation_samples = 57600
epochs = 50
batch_size = 16

# Keras backends disagree on channel ordering; build the input shape
# to match whichever backend is active.
if K.image_data_format() == 'channels_first':
    input_shape = (3, img_width, img_height)
else:
    input_shape = (img_width, img_height, 3)

# Three conv/pool stages followed by a small dense classifier ending in
# a single sigmoid unit (binary output: final vs noisy).
model = Sequential()
model.add(Conv2D(40, (3, 3), input_shape=input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Conv2D(20, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

# 2x2 kernel here: after two pooling stages the 20x20 input has shrunk
# to roughly 3x3, so a 3x3 kernel would leave no spatial output.
model.add(Conv2D(40, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Flatten())
model.add(Dense(40))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))

model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# this is the augmentation configuration we will use for training
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True)

# this is the augmentation configuration we will use for testing:
# only rescaling
test_datagen = ImageDataGenerator(rescale=1. / 255)

train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='binary')

validation_generator = test_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='binary')

model.fit_generator(
    train_generator,
    steps_per_epoch=nb_train_samples // batch_size,
    epochs=epochs,
    validation_data=validation_generator,
    validation_steps=nb_validation_samples // batch_size)

# The weight-file name encodes the layer widths. The previous name
# ("32_16_16_32") did not match the actual architecture built above
# (conv filters 40/20/40, dense 40), which would mislead anyone reading
# the results table in TODO.md.
model.save_weights('noise_classification_40_20_40_40_img20.h5')

+ 42 - 0
generate_dataset.py

@@ -0,0 +1,42 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Sep 14 21:02:42 2018
+
+@author: jbuisine
+"""
+
+from __future__ import print_function
+import keras
+from keras.datasets import cifar10
+from keras.preprocessing.image import ImageDataGenerator
+from keras.models import Sequential
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers import Conv2D, MaxPooling2D
+import os, glob, image_slicer
+from PIL import Image
+
+# show to create own dataset https://gist.github.com/fchollet/0830affa1f7f19fd47b06d4cf89ed44d
+
+NUMBER_SUB_IMAGES = 1600
+
def create_images(folder, output_folder):
    """Slice every PNG found in `folder` into NUMBER_SUB_IMAGES tiles and
    save the tiles into `output_folder`.

    Each tile is saved with the prefix 'part_<original file name>' so the
    source scene image of every tile stays identifiable.
    """
    # Make sure the destination exists before image_slicer writes into it;
    # os.makedirs with an existence check keeps this Python 2 compatible.
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    images_path = glob.glob(folder + "/*.png")

    for img in images_path:
        # os.path.basename instead of the old replace(folder, '') /
        # replace('/', '') chain: the string-replacement approach breaks if
        # the folder name recurs inside the file name and does not handle
        # Windows path separators.
        image_name = os.path.basename(img)
        tiles = image_slicer.slice(img, NUMBER_SUB_IMAGES, save=False)
        image_slicer.save_tiles(tiles, directory=output_folder, prefix='part_' + image_name)
+
def generate_dataset():
    """Build the full train/validation dataset by slicing every scene image
    from the img_* source folders into the data/ tree expected by
    classification_cnn_keras.py.
    """
    # (source root, destination root) pairs, processed in train-then-
    # validation order, each with its 'final' and 'noisy' class folder.
    splits = (
        ('img_train', 'data/train'),
        ('img_validation', 'data/validation'),
    )
    for src_root, dst_root in splits:
        for label in ('final', 'noisy'):
            create_images(src_root + '/' + label, dst_root + '/' + label)
+
def main():
    """Script entry point: build the dataset from the img_* folders.

    Intended to be run once; re-running will re-slice every image.
    """
    generate_dataset()


if __name__ == "__main__":
    main()

BIN
img_train/final/appartAopt_00850.png


BIN
img_train/final/appartAopt_00860.png


BIN
img_train/final/appartAopt_00870.png


BIN
img_train/final/appartAopt_00880.png


BIN
img_train/final/appartAopt_00890.png


BIN
img_train/final/appartAopt_00900.png


BIN
img_train/final/bureau1_9700.png


BIN
img_train/final/bureau1_9750.png


BIN
img_train/final/bureau1_9800.png


BIN
img_train/final/bureau1_9850.png


BIN
img_train/final/bureau1_9900.png


BIN
img_train/final/bureau1_9950.png


BIN
img_train/final/cendrierIUT2_01180.png


BIN
img_train/final/cendrierIUT2_01240.png


BIN
img_train/final/cendrierIUT2_01300.png


BIN
img_train/final/cendrierIUT2_01360.png


BIN
img_train/final/cendrierIUT2_01420.png


BIN
img_train/final/cendrierIUT2_01480.png


BIN
img_train/final/cuisine01_01150.png


BIN
img_train/final/cuisine01_01160.png


BIN
img_train/final/cuisine01_01170.png


BIN
img_train/final/cuisine01_01180.png


BIN
img_train/final/cuisine01_01190.png


BIN
img_train/final/cuisine01_01200.png


BIN
img_train/final/echecs09750.png


BIN
img_train/final/echecs09800.png


BIN
img_train/final/echecs09850.png


BIN
img_train/final/echecs09900.png


BIN
img_train/final/echecs09950.png


BIN
img_train/final/echecs10000.png


BIN
img_train/final/pnd_39750.png


BIN
img_train/final/pnd_39800.png


BIN
img_train/final/pnd_39850.png


BIN
img_train/final/pnd_39900.png


BIN
img_train/final/pnd_39950.png


BIN
img_train/final/pnd_40000.png


BIN
img_train/noisy/appartAopt_00070.png


BIN
img_train/noisy/appartAopt_00080.png


BIN
img_train/noisy/appartAopt_00090.png


BIN
img_train/noisy/appartAopt_00100.png


BIN
img_train/noisy/appartAopt_00110.png


BIN
img_train/noisy/appartAopt_00120.png


BIN
img_train/noisy/bureau1_100.png


BIN
img_train/noisy/bureau1_1000.png


BIN
img_train/noisy/bureau1_1050.png


BIN
img_train/noisy/bureau1_1100.png


BIN
img_train/noisy/bureau1_1150.png


BIN
img_train/noisy/bureau1_1250.png


BIN
img_train/noisy/cendrierIUT2_00040.png


BIN
img_train/noisy/cendrierIUT2_00100.png


BIN
img_train/noisy/cendrierIUT2_00160.png


BIN
img_train/noisy/cendrierIUT2_00220.png


BIN
img_train/noisy/cendrierIUT2_00280.png


BIN
img_train/noisy/cendrierIUT2_00340.png


BIN
img_train/noisy/cuisine01_00050.png


BIN
img_train/noisy/cuisine01_00060.png


BIN
img_train/noisy/cuisine01_00070.png


BIN
img_train/noisy/cuisine01_00080.png


BIN
img_train/noisy/cuisine01_00090.png


BIN
img_train/noisy/cuisine01_00100.png


BIN
img_train/noisy/echecs00050.png


BIN
img_train/noisy/echecs00100.png


BIN
img_train/noisy/echecs00150.png


BIN
img_train/noisy/echecs00200.png


BIN
img_train/noisy/echecs00250.png


BIN
img_train/noisy/echecs00300.png


BIN
img_train/noisy/pnd_100.png


BIN
img_train/noisy/pnd_1000.png


BIN
img_train/noisy/pnd_1050.png


BIN
img_train/noisy/pnd_1150.png


BIN
img_train/noisy/pnd_1200.png


BIN
img_train/noisy/pnd_1300.png


BIN
img_validation/final/SdB2_00900.png


BIN
img_validation/final/SdB2_00910.png


BIN
img_validation/final/SdB2_00920.png


BIN
img_validation/final/SdB2_00930.png


BIN
img_validation/final/SdB2_00940.png


BIN
img_validation/final/SdB2_00950.png


BIN
img_validation/final/SdB2_D_00900.png


BIN
img_validation/final/SdB2_D_00910.png


BIN
img_validation/final/SdB2_D_00920.png


BIN
img_validation/final/SdB2_D_00930.png


BIN
img_validation/final/SdB2_D_00940.png


BIN
img_validation/final/SdB2_D_00950.png


BIN
img_validation/final/selles_envir02850.png


BIN
img_validation/final/selles_envir02900.png


BIN
img_validation/final/selles_envir02950.png


BIN
img_validation/final/selles_envir03000.png


BIN
img_validation/final/selles_envir03050.png


BIN
img_validation/final/selles_envir03100.png


BIN
img_validation/noisy/SdB2_00020.png


BIN
img_validation/noisy/SdB2_00030.png


BIN
img_validation/noisy/SdB2_00040.png


BIN
img_validation/noisy/SdB2_00050.png


+ 0 - 0
img_validation/noisy/SdB2_00060.png


Some files were not shown because too many files changed in this diff