
List of models to simulate

Jérôme BUISINE, 5 years ago
Parent commit: dfaee0db83

+ 1 - 1
.gitignore

@@ -5,7 +5,7 @@ threshold_map/*
 models_info/*
 custom_norm/*
 
-simulate_models.csv
+# simulate_models.csv
 
 fichiersSVD_light/*/*/*.csv
 fichiersSVD_light/*_min_max_values

+ 58 - 41
cnn_keras_svd.py

@@ -2,28 +2,28 @@ from keras.preprocessing.image import ImageDataGenerator
 from keras.models import Sequential
 from keras.layers import Conv1D, MaxPooling1D
 from keras.layers import Activation, Dropout, Flatten, Dense, BatchNormalization
+from keras.wrappers.scikit_learn import KerasClassifier
 from keras import backend as K
 import matplotlib.pyplot as plt
 
 from sklearn.utils import shuffle
+from sklearn.metrics import roc_auc_score
 
 import numpy as np
 import pandas as pd
 
 from ipfml import processing
+import modules.utils.config as cfg
+
 from PIL import Image
 
-import sys, os, getopt
+import sys, os
+import argparse
+import json
+
 import subprocess
 import time
 
-vector_size = 100
-epochs = 100
-batch_size = 24
-
-input_shape = (vector_size, 1)
-filename = "svd_model"
-
 def f1(y_true, y_pred):
     def recall(y_true, y_pred):
         """Recall metric.
@@ -54,7 +54,7 @@ def f1(y_true, y_pred):
     recall = recall(y_true, y_pred)
     return 2*((precision*recall)/(precision+recall+K.epsilon()))
 
-def generate_model():
+def generate_model(input_shape):
 
     model = Sequential()
 
@@ -90,35 +90,35 @@ def generate_model():
 
     model.add(Flatten(input_shape=input_shape))
 
-    #model.add(Dense(2048))
-    #model.add(Activation('relu'))
-    #model.add(BatchNormalization())
-    #model.add(Dropout(0.3))
+    model.add(Dense(2048))
+    model.add(Activation('relu'))
+    model.add(BatchNormalization())
+    model.add(Dropout(0.2))
 
     model.add(Dense(1024))
     model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dropout(0.4))
+    model.add(Dropout(0.2))
 
     model.add(Dense(512))
     model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dropout(0.4))
+    model.add(Dropout(0.3))
 
     model.add(Dense(256))
     model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dropout(0.4))
+    model.add(Dropout(0.3))
 
     model.add(Dense(128))
     model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dropout(0.4))
+    model.add(Dropout(0.3))
 
     model.add(Dense(20))
     model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dropout(0.4))
+    model.add(Dropout(0.3))
 
     model.add(Dense(1))
     model.add(Activation('sigmoid'))
@@ -131,24 +131,22 @@ def generate_model():
 
 def main():
 
-    if len(sys.argv) <= 1:
-        print('Run with default parameters...')
-        print('python save_model_result_in_md.py --data filename')
-        sys.exit(2)
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], "hd", ["help=", "data="])
-    except getopt.GetoptError:
-        # print help information and exit:
-        print('python save_model_result_in_md.py --data filename')
-        sys.exit(2)
-    for o, a in opts:
-        if o == "-h":
-            print('python save_model_result_in_md.py --data filename')
-            sys.exit()
-        elif o in ("-d", "--data"):
-            p_datafile = a
-        else:
-            assert False, "unhandled option"
+    parser = argparse.ArgumentParser(description="Process deep_network_keras_svd.py parameters")
+
+    parser.add_argument('--data', type=str, help='Data filename prefix to access train and test dataset')
+    parser.add_argument('--output', type=str, help='Name of filename to save model into')
+    parser.add_argument('--size', type=int, help='Size of input data vector')
+
+    args = parser.parse_args()
+
+    p_datafile = args.data
+    p_output_filename = args.output
+    p_vector_size = args.size
+
+    epochs = 10
+    batch_size = cfg.keras_batch
+
+    input_shape = (p_vector_size, 1)
 
     ###########################
     # 1. Get and prepare data
@@ -190,21 +188,40 @@ def main():
     # 2. Getting model
     #######################
 
-    model = generate_model()
+    model = generate_model(input_shape)
     model.summary()
+    model = KerasClassifier(build_fn=model, epochs=cfg.keras_epochs, batch_size=cfg.keras_batch, verbose=0)
 
     #######################
     # 3. Fit model : use of cross validation to fit model
     #######################
 
     # reshape input data
-    x_dataset_train = np.array(x_dataset_train).reshape(len(x_dataset_train), vector_size, 1)
-    x_dataset_test = np.array(x_dataset_test).reshape(len(x_dataset_test), vector_size, 1)
+    x_dataset_train = np.array(x_dataset_train).reshape(len(x_dataset_train), p_vector_size, 1)
+    x_dataset_test = np.array(x_dataset_test).reshape(len(x_dataset_test), p_vector_size, 1)
 
-    model.fit(x_dataset_train, y_dataset_train, epochs=epochs, batch_size=batch_size, validation_split=0.20)
+    model.fit(x_dataset_train, y_dataset_train, validation_split=0.20)
 
     score = model.evaluate(x_dataset_test, y_dataset_test, batch_size=batch_size)
-    print(score)
+
+    if not os.path.exists(cfg.saved_models_folder):
+        os.makedirs(cfg.saved_models_folder)
+
+    # save the model into HDF5 file
+    model_output_path = os.path.join(cfg.saved_models_folder, p_output_filename + '.json')
+    json_model_content = model.to_json()
+
+    with open(model_output_path, 'w') as f:
+        print("Model saved into ", model_output_path)
+        json.dump(json_model_content, f, indent=4)
+
+    model.save_weights(model_output_path.replace('.json', '.h5'))
+
+    # Save results obtained from model
+    y_test_prediction = model.predict(x_dataset_test)
+    print("Prediction : ", score)
+    print("ROC AUC : ", roc_auc_score(y_dataset_test, y_test_prediction))
+
 
 if __name__== "__main__":
     main()
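
Note on the KerasClassifier call added above: in keras.wrappers.scikit_learn, build_fn is normally a callable that builds and returns a compiled model, and the trained Keras model is then available as .model on the wrapper after fit(), which is where to_json() and save_weights() are defined. A minimal sketch of that usual pattern, reusing generate_model from this file; the wiring below is illustrative, not taken from this commit:

    # Minimal sketch of the usual scikit-learn wrapper pattern (illustrative only).
    from keras.wrappers.scikit_learn import KerasClassifier

    def build_fn():
        # generate_model(input_shape) is defined earlier in this file; the wrapper
        # expects it to return a compiled Keras model ready to train.
        return generate_model((p_vector_size, 1))

    classifier = KerasClassifier(build_fn=build_fn,
                                 epochs=cfg.keras_epochs,
                                 batch_size=cfg.keras_batch,
                                 verbose=0)
    classifier.fit(x_dataset_train, y_dataset_train, validation_split=0.20)

    # The underlying trained Keras model sits on the wrapper after fit();
    # JSON/H5 saving goes through it rather than through the wrapper itself.
    trained_model = classifier.model
    json_model_content = trained_model.to_json()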

+ 310 - 0
display_svd_area_scenes.py

@@ -0,0 +1,310 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Sep 14 21:02:42 2018
+
+@author: jbuisine
+"""
+
+from __future__ import print_function
+import sys, os, getopt
+
+import numpy as np
+import random
+import time
+import json
+
+from PIL import Image
+from ipfml import processing, metrics, utils
+import ipfml.iqa.fr as fr_iqa
+
+from skimage import color
+
+import matplotlib.pyplot as plt
+from modules.utils.data import get_svd_data
+
+from modules.utils import config as cfg
+
+# getting configuration information
+config_filename     = cfg.config_filename
+zone_folder         = cfg.zone_folder
+min_max_filename    = cfg.min_max_filename_extension
+
+# define all scenes values
+scenes_list         = cfg.scenes_names
+scenes_indices      = cfg.scenes_indices
+choices             = cfg.normalization_choices
+path                = cfg.dataset_path
+zones               = cfg.zones_indices
+seuil_expe_filename = cfg.seuil_expe_filename
+
+metric_choices      = cfg.metric_choices_labels
+
+max_nb_bits = 8
+
+integral_area_choices = ['trapz', 'simps']
+
+def get_area_under_curve(p_area, p_data):
+
+    noise_method = None
+    function_name = 'integral_area_' + p_area
+
+    try:
+        area_method = getattr(utils, function_name)
+    except AttributeError:
+        raise NotImplementedError("Error `{}` not implement `{}`".format(utils.__name__, function_name))
+
+    return area_method(p_data, dx=800)
+
+
+def display_svd_values(p_interval, p_indices, p_metric, p_mode, p_step, p_norm, p_area, p_ylim):
+    """
+    @brief Method which gives information about svd curves from zone of picture
+    @param p_scene, scene expected to show svd values
+    @param p_interval, interval [begin, end] of svd data to display
+    @param p_interval, interval [begin, end] of samples or minutes from render generation engine
+    @param p_metric, metric computed to show
+    @param p_mode, normalization's mode
+    @param p_norm, normalization or not of selected svd data
+    @param p_area, area method name to compute area under curve
+    @param p_ylim, ylim choice to better display of data
+    @return nothing
+    """
+
+    image_indices = []
+
+    scenes = os.listdir(path)
+    # remove min max file from scenes folder
+    scenes = [s for s in scenes if min_max_filename not in s]
+
+    begin_data, end_data = p_interval
+    begin_index, end_index = p_indices
+
+    data_min_max_filename = os.path.join(path, p_metric + min_max_filename)
+
+    # Store all informations about scenes
+    scenes_area_data = []
+    scenes_images_indices = []
+    scenes_threshold_mean = []
+
+    # go ahead each scenes
+    for id_scene, folder_scene in enumerate(scenes):
+
+        max_value_svd = 0
+        min_value_svd = sys.maxsize
+
+        scene_path = os.path.join(path, folder_scene)
+
+        config_file_path = os.path.join(scene_path, config_filename)
+
+        with open(config_file_path, "r") as config_file:
+            last_image_name = config_file.readline().strip()
+            prefix_image_name = config_file.readline().strip()
+            start_index_image = config_file.readline().strip()
+            end_index_image = config_file.readline().strip()
+            step_counter = int(config_file.readline().strip())
+
+        # construct each zones folder name
+        zones_folder = []
+
+        # get zones list info
+        for index in zones:
+            index_str = str(index)
+            if len(index_str) < 2:
+                index_str = "0" + index_str
+
+            current_zone = "zone"+index_str
+            zones_folder.append(current_zone)
+
+        # store data information for current scene
+        images_data = []
+        images_indices = []
+        threshold_learned_zones = []
+
+        for id, zone_folder in enumerate(zones_folder):
+
+            # get threshold information
+            zone_path = os.path.join(scene_path, zone_folder)
+            path_seuil = os.path.join(zone_path, seuil_expe_filename)
+
+            # open treshold path and get this information
+            with open(path_seuil, "r") as seuil_file:
+                threshold_learned = int(seuil_file.readline().strip())
+                threshold_learned_zones.append(threshold_learned)
+
+        current_counter_index = int(start_index_image)
+        end_counter_index = int(end_index_image)
+
+        threshold_mean = np.mean(np.asarray(threshold_learned_zones))
+        threshold_image_found = False
+        scenes_threshold_mean.append(int(threshold_mean / p_step))
+
+        file_path = os.path.join(scene_path, prefix_image_name + "{}.png")
+
+        svd_data = []
+
+        while(current_counter_index <= end_counter_index):
+
+            current_counter_index_str = str(current_counter_index)
+
+            while len(start_index_image) > len(current_counter_index_str):
+                current_counter_index_str = "0" + current_counter_index_str
+
+            image_path = file_path.format(str(current_counter_index_str))
+            img = Image.open(image_path)
+
+            svd_values = get_svd_data(p_metric, img)
+
+            if p_norm:
+                svd_values = svd_values[begin_data:end_data]
+
+            # update min max values
+            min_value = svd_values.min()
+            max_value = svd_values.max()
+
+            if min_value < min_value_svd:
+                min_value_svd = min_value
+
+            if max_value > min_value_svd:
+                max_value_svd = max_value
+
+            # keep in memory used data
+            if current_counter_index % p_step == 0:
+                if current_counter_index >= begin_index and current_counter_index <= end_index:
+                    images_indices.append(current_counter_index_str)
+                    svd_data.append(svd_values)
+
+                if threshold_mean < int(current_counter_index) and not threshold_image_found:
+
+                    threshold_image_found = True
+                    threshold_image_zone = current_counter_index_str
+
+            current_counter_index += step_counter
+            print('%.2f%%' % (current_counter_index / end_counter_index * 100))
+            sys.stdout.write("\033[F")
+
+
+            # all indices of picture to plot
+        print("Scene %s : %s" % (folder_scene, images_indices))
+
+
+        scenes_images_indices.append(image_indices)
+
+        area_data = []
+
+        for id, data in enumerate(svd_data):
+
+            current_data = data
+
+            if not p_norm:
+                current_data = current_data[begin_data:end_data]
+
+            if p_mode == 'svdn':
+                current_data = utils.normalize_arr(current_data)
+
+            if p_mode == 'svdne':
+                current_data = utils.normalize_arr_with_range(current_data, min_value_svd, max_value_svd)
+
+            images_data.append(current_data)
+
+            # not use this script for 'sub_blocks_stats'
+            current_area = get_area_under_curve(p_area, current_data)
+            area_data.append(current_area)
+
+        scenes_area_data.append(area_data)
+
+    # display all data using matplotlib (configure plt)
+    plt.title('Scenes area interval information SVD['+ str(begin_data) +', '+ str(end_data) +'], from scenes indices [' + str(begin_index) + ', '+ str(end_index) + ']' + p_metric + ' metric, ' + p_mode + ', with step of ' + str(p_step) + ', svd norm ' + str(p_norm), fontsize=20)
+    plt.ylabel('Image samples or time (minutes) generation', fontsize=14)
+    plt.xlabel('Vector features', fontsize=16)
+
+    plt.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2, fontsize=14)
+
+    for id, area_data in enumerate(scenes_area_data):
+
+        threshold_id = 0
+        scene_name = scenes[id]
+        image_indices = scenes_images_indices[id]
+        threshold_image_zone = scenes_threshold_mean[id]
+
+        p_label = scene_name + '_' + str(images_indices[id])
+
+        threshold_id = scenes_threshold_mean[id]
+
+        print(p_label)
+        start_ylim, end_ylim = p_ylim
+
+        plt.plot(area_data, label=p_label)
+        #ax2.set_xticks(range(len(images_indices)))
+        #ax2.set_xticklabels(list(map(int, images_indices)))
+        if threshold_id != 0:
+            print("Plot threshold ", threshold_id)
+            plt.plot([threshold_id, threshold_id], [np.min(area_data), np.max(area_data)], 'k-', lw=2, color='red')
+
+
+    #start_ylim, end_ylim = p_ylim
+    #plt.ylim(start_ylim, end_ylim)
+
+    plt.show()
+
+def main():
+
+
+    # by default p_step value is 10 to enable all photos
+    p_step = 10
+    p_ylim = (0, 1)
+
+    if len(sys.argv) <= 1:
+        print('Run with default parameters...')
+        print('python display_svd_area_scenes.py --interval "0,800" --indices "0, 900" --metric lab --mode svdne --step 50 --norm 0 --area simps --ylim "0, 0.1"')
+        sys.exit(2)
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "hs:i:i:z:l:m:s:n:a:y", ["help=", "scene=", "interval=", "indices=", "metric=", "mode=", "step=", "norm=", "area=", "ylim="])
+    except getopt.GetoptError:
+        # print help information and exit:
+        print('python display_svd_area_scenes.py --interval "0,800" --indices "0, 900" --metric lab --mode svdne --step 50 --norm 0 --area simps --ylim "0, 0.1"')
+        sys.exit(2)
+    for o, a in opts:
+        if o == "-h":
+            print('python display_svd_area_scenes.py --interval "0,800" --indices "0, 900" --metric lab --mode svdne --step 50 --norm 0 --area simps --ylim "0, 0.1"')
+            sys.exit()
+        elif o in ("-i", "--interval"):
+            p_interval = list(map(int, a.split(',')))
+
+        elif o in ("-i", "--indices"):
+            p_indices = list(map(int, a.split(',')))
+
+        elif o in ("-m", "--metric"):
+            p_metric = a
+
+            if p_metric not in metric_choices:
+                assert False, "Invalid metric choice"
+
+        elif o in ("-m", "--mode"):
+            p_mode = a
+
+            if p_mode not in choices:
+                assert False, "Invalid normalization choice, expected ['svd', 'svdn', 'svdne']"
+
+        elif o in ("-s", "--step"):
+            p_step = int(a)
+
+        elif o in ("-n", "--norm"):
+            p_norm = int(a)
+
+        elif o in ("-a", "--area"):
+            p_area = a
+
+            if p_area not in integral_area_choices:
+                assert False, "Invalid area computation choices : %s " % integral_area_choices
+
+        elif o in ("-y", "--ylim"):
+            p_ylim = list(map(float, a.split(',')))
+
+        else:
+            assert False, "unhandled option"
+
+    display_svd_values(p_interval, p_indices, p_metric, p_mode, p_step, p_norm, p_area, p_ylim)
+
+if __name__== "__main__":
+    main()
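
For reference, the integral_area_trapz / integral_area_simps helpers that get_area_under_curve resolves via getattr correspond to the trapezoidal and Simpson's integration rules. A rough standalone equivalent using numpy/scipy is sketched below; the ipfml helper signatures are assumed rather than checked, and only the dx=800 spacing comes from the call above:

    # Rough standalone equivalent of get_area_under_curve (sketch; assumes the
    # ipfml helpers wrap the standard trapezoidal and Simpson rules).
    import numpy as np
    from scipy.integrate import simps

    def area_under_curve(method, values, dx=800):
        values = np.asarray(values, dtype=float)
        if method == 'trapz':
            return np.trapz(values, dx=dx)   # trapezoidal rule
        if method == 'simps':
            return simps(values, dx=dx)      # Simpson's rule
        raise NotImplementedError("unknown area method: {}".format(method))

    # e.g. area_under_curve('simps', current_data) for each curve in svd_data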

+ 4 - 1
modules/utils/config.py

@@ -13,7 +13,7 @@ seuil_expe_filename             = 'seuilExpe'
 min_max_filename_extension      = "_min_max_values"
 config_filename                 = "config"
 
-models_names_list               = ["svm_model","ensemble_model","ensemble_model_v2"]
+models_names_list               = ["svm_model","ensemble_model","ensemble_model_v2","deep_keras"]
 
 # define all scenes values
 renderer_choices                = ['all', 'maxwell', 'igloo', 'cycle']
@@ -34,3 +34,6 @@ normalization_choices           = ['svd', 'svdn', 'svdne']
 zones_indices                   = np.arange(16)
 
 metric_choices_labels           = ['lab', 'mscn_revisited', 'low_bits_2', 'low_bits_3', 'low_bits_4', 'low_bits_5', 'low_bits_6','low_bits_4_shifted_2', 'sub_blocks_stats', 'sub_blocks_area', 'sub_blocks_stats_reduced', 'sub_blocks_area_normed']
+
+keras_epochs                    = 10
+keras_batch                     = 32

+ 6 - 1
prediction_scene.py

@@ -4,8 +4,14 @@ import numpy as np
 
 import pandas as pd
 from sklearn.metrics import accuracy_score
+from keras.models import Sequential
+from keras.layers import Conv1D, MaxPooling1D
+from keras.layers import Activation, Dropout, Flatten, Dense, BatchNormalization
+from keras import backend as K
+from keras.models import model_from_json
 
 import sys, os, getopt
+import json
 
 from modules.utils import config as cfg
 
@@ -77,6 +83,5 @@ def main():
             for prediction in y_pred:
                 f.write(str(prediction) + '\n')
 
-
 if __name__== "__main__":
     main()

+ 51 - 0
runAll_maxwell_keras.sh

@@ -0,0 +1,51 @@
+#! bin/bash
+
+# erase "models_info/models_comparisons.csv" file and write new header
+file_path='models_info/models_comparisons.csv'
+
+erased=$1
+
+if [ "${erased}" == "Y" ]; then
+    echo "Previous data file erased..."
+    rm ${file_path}
+    mkdir -p models_info
+    touch ${file_path}
+
+    # add of header
+    echo 'model_name; vector_size; start_index; end; nb_zones; metric; mode; tran_size; val_size; test_size; train_pct_size; val_pct_size; test_pct_size; train_acc; val_acc; test_acc; all_acc; F1_train; recall_train; roc_auc_train; F1_test; recall_test; roc_auc_test; F1_all; recall_all; roc_auc_all;' >> ${file_path}
+
+fi
+
+start_index=0
+end_index=24
+
+# selection of four scenes (only maxwell)
+scenes="A, D, G, H"
+
+metrics_size=( ["sub_blocks_stats"]=24 ["sub_blocks_stats_reduced"]=20 ["sub_blocks_area"]=16 ["sub_blocks_area_normed"]=20)
+
+for metric in {"sub_blocks_stats","sub_blocks_stats_reduced","sub_blocks_area","sub_blocks_area_normed"}; do
+    for nb_zones in {4,6,8,10,12}; do
+
+        for mode in {"svd","svdn","svdne"}; do
+
+            end_index=${metrics_size[${metric}]}
+            FILENAME="data/data_maxwell_N${end_index}_B${start_index}_E${end_index}_nb_zones_${nb_zones}_${metric}_${mode}"
+            MODEL_NAME="deep_keras_N${end_index}_B${start_index}_E${end_index}_nb_zones_${nb_zones}_${metric}_${mode}"
+
+            echo $FILENAME
+
+            # only compute if necessary (perhaps server will fall.. Just in case)
+            if grep -q "${MODEL_NAME}" "${file_path}"; then
+
+                echo "${MODEL_NAME} results already generated..."
+            else
+                python generate_data_model_random.py --output ${FILENAME} --interval "${start_index},${end_index}" --kind ${mode} --metric ${metric} --scenes "${scenes}" --nb_zones "${nb_zones}" --percent 1 --renderer "maxwell" --step 10 --random 1
+                python deep_network_keras_svd.py --data ${FILENAME} --output ${MODEL_NAME} --size ${end_index}
+
+                python save_model_result_in_md_maxwell.py --interval "${start_index},${end_index}" --model "saved_models/${MODEL_NAME}.json" --mode "${mode}" --metric ${metric}
+            fi
+        done
+    done
+done
+

+ 27 - 6
save_model_result_in_md_maxwell.py

@@ -2,8 +2,15 @@ from sklearn.utils import shuffle
 from sklearn.externals import joblib
 from sklearn.metrics import accuracy_score, f1_score, recall_score, roc_auc_score
 from sklearn.model_selection import cross_val_score
+from sklearn.model_selection import StratifiedKFold
 from sklearn.model_selection import train_test_split
 
+from keras.models import Sequential
+from keras.layers import Conv1D, MaxPooling1D
+from keras.layers import Activation, Dropout, Flatten, Dense, BatchNormalization
+from keras import backend as K
+from keras.models import model_from_json
+
 import numpy as np
 import pandas as pd
 
@@ -13,6 +20,7 @@ from PIL import Image
 import sys, os, getopt
 import subprocess
 import time
+import json
 
 from modules.utils import config as cfg
 
@@ -30,6 +38,8 @@ current_dirpath = os.getcwd()
 
 def main():
 
+    kind_model = 'keras'
+
     if len(sys.argv) <= 1:
         print('Run with default parameters...')
         print('python save_model_result_in_md.py --interval "0,20" --model path/to/xxxx.joblib --mode ["svd", "svdn", "svdne"] --metric ["lab", "mscn"]')
@@ -68,18 +78,25 @@ def main():
     print(bash_cmd)
 
     ## call command ##
-    p = subprocess.Popen(bash_cmd, stdout=subprocess.PIPE, shell=True)
+    #p = subprocess.Popen(bash_cmd, stdout=subprocess.PIPE, shell=True)
 
-    (output, err) = p.communicate()
+    #(output, err) = p.communicate()
 
     ## Wait for result ##
-    p_status = p.wait()
+    #p_status = p.wait()
 
     if not os.path.exists(markdowns_folder):
         os.makedirs(markdowns_folder)
 
     # get model name to construct model
-    md_model_path = os.path.join(markdowns_folder, p_model_file.split('/')[-1].replace('.joblib', '.md'))
+
+    if '.joblib' in p_model_file:
+        kind_model = 'sklearn'
+        md_model_path = os.path.join(markdowns_folder, p_model_file.split('/')[-1].replace('.joblib', '.md'))
+
+    if '.json' in p_model_file:
+        kind_model = 'keras'
+        md_model_path = os.path.join(markdowns_folder, p_model_file.split('/')[-1].replace('.json', '.md'))
 
     with open(md_model_path, 'w') as f:
         f.write(output.decode("utf-8"))
@@ -108,8 +125,9 @@ def main():
 
         f.close()
 
+
     # Keep model information to compare
-    current_model_name = p_model_file.split('/')[-1].replace('.joblib', '')
+    current_model_name = p_model_file.split('/')[-1].replace('.json', '')
 
     # Prepare writing in .csv file
     output_final_file_path = os.path.join(markdowns_folder, final_csv_model_comparisons)
@@ -121,6 +139,8 @@ def main():
         if name in current_model_name:
             current_data_file_path = os.path.join('data', current_model_name.replace(name, 'data_maxwell'))
 
+    print("Current data file ")
+    print(current_data_file_path)
     model_scores = []
 
     ########################
@@ -169,6 +189,7 @@ def main():
     # 3. Fit model : use of cross validation to fit model
     #######################
     model.fit(x_dataset_train, y_dataset_train)
+
     val_scores = cross_val_score(model, x_dataset_train, y_dataset_train, cv=5)
 
     ######################
@@ -229,7 +250,7 @@ def main():
     model_scores.append(test_set_size / total_samples)
 
     # add of scores
-    model_scores.append(val_scores.mean())
+    #model_scores.append(val_scores.mean())
     model_scores.append(val_accuracy)
     model_scores.append(test_accuracy)
     model_scores.append(all_accuracy)
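
The model_from_json import added at the top of this file suggests the keras branch will reload the model that cnn_keras_svd.py now saves as a .json architecture plus .h5 weights; the loading code itself is not part of this hunk, so the sketch below is only the standard Keras reload pattern, with the compile settings as stated assumptions:

    # Standard Keras pattern for reloading a model stored as JSON + H5 (sketch only).
    import json
    from keras.models import model_from_json

    def load_keras_model(json_path):
        with open(json_path, 'r') as f:
            # cnn_keras_svd.py wrote the architecture with json.dump(model.to_json(), f),
            # so json.load() returns the JSON string expected by model_from_json.
            json_model_content = json.load(f)
        model = model_from_json(json_model_content)
        model.load_weights(json_path.replace('.json', '.h5'))
        # recompilation is required before evaluate(); these settings are illustrative
        model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
        return model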

BIN
saved_models/deep_keras_N20_B0_E20_nb_zones_4_sub_blocks_stats_svd.h5


BIN
saved_models/deep_keras_N20_B0_E20_nb_zones_4_sub_blocks_stats_svdn.h5


+ 8 - 0
simulate_models.csv

@@ -0,0 +1,8 @@
+ensemble_model_v2_N40_B0_E40_nb_zones_12_low_bits_2_svd
+ensemble_model_v2_N32_B0_E32_nb_zones_12_low_bits_2_svdne
+ensemble_model_N40_B0_E40_nb_zones_12_low_bits_3_svd
+svm_model_N8_B46_E54_nb_zones_12_low_bits_2_svd
+svm_model_N8_B96_E104_nb_zones_4_lab_svdne
+ensemble_model_v2_N8_B0_E8_nb_zones_12_low_bits_2_svdne
+svm_model_N32_B0_E32_nb_zones_12_low_bits_2_svdn
+ensemble_model_v2_N32_B0_E32_nb_zones_12_low_bits_2_svdn

BIN
simulate_models.ods