Parcourir la source

Renormalization possibility added; Update of generation data

Jérôme BUISINE il y a 6 ans
Parent
commit
3566dc6550

+ 9 - 5
README.md

@@ -14,12 +14,16 @@ python generate_all_data.py --metric all
 For noise detection, many metrics are available :
 For noise detection, many metrics are available :
 - lab
 - lab
 - mscn
 - mscn
+- mscn_revisited
 - low_bits_2
 - low_bits_2
 - low_bits_4
 - low_bits_4
+- low_bits_5
+- low_bits_6
+- low_bits_4_shifted_2
 
 
-You can also specify metric you want to compute :
+You can also specify the metric you want to compute and an image step to skip some images :
 ```bash
 ```bash
-python generate_all_data.py --metric mscn
+python generate_all_data.py --metric mscn --step 50
 ```
 ```
 
 
 ## How to use
 ## How to use
@@ -27,9 +31,9 @@ python generate_all_data.py --metric mscn
 ### Multiple folders and scripts are available :
 ### Multiple folders and scripts are available :
 
 
 
 
-- **fichiersSVD/\*** : all scene files information (zones of each scene, SVD descriptor files information and so on...).
-- **fichiersSVD_light/\*** : all scene files information (zones of each scene, SVD descriptor files information and so on...) but here with reduction of information for few scenes. Information used in our case.
+- **fichiersSVD_light/\*** : all scene files information (zones of each scene, SVD descriptor files information and so on...).
 - **models/*.py** : all models developed to predict noise in image.
 - **models/*.py** : all models developed to predict noise in image.
+- **utils/** : contains all useful scripts or modules.
 - **data/\*** : folder which will contain all *.train* & *.test* files in order to train model.
 - **data/\*** : folder which will contain all *.train* & *.test* files in order to train model.
 - **saved_models/*.joblib** : all scikit learn models saved.
 - **saved_models/*.joblib** : all scikit learn models saved.
 - **models_info/*.md** : all markdown files generated to get quick information about model performance and prediction.
 - **models_info/*.md** : all markdown files generated to get quick information about model performance and prediction.
@@ -159,4 +163,4 @@ All others bash scripts are used to combine and run multiple model combinations.
 
 
 ## How to contribute
 ## How to contribute
 
 
-This git project uses [git-flow](https://danielkummer.github.io/git-flow-cheatsheet/) implementation. You are free to contribute to it.git 
+This git project uses [git-flow](https://danielkummer.github.io/git-flow-cheatsheet/) implementation. You are free to contribute to it.

+ 62 - 113
generate_all_data.py

@@ -13,6 +13,7 @@ import random
 import time
 import time
 import json
 import json
 
 
+from utils.data_type_module import get_svd_data
 from PIL import Image
 from PIL import Image
 from ipfml import image_processing
 from ipfml import image_processing
 from ipfml import metrics
 from ipfml import metrics
@@ -33,6 +34,7 @@ zones = np.arange(16)
 seuil_expe_filename = 'seuilExpe'
 seuil_expe_filename = 'seuilExpe'
 
 
 metric_choices = ['lab', 'mscn', 'mscn_revisited', 'low_bits_2', 'low_bits_3', 'low_bits_4', 'low_bits_5', 'low_bits_6','low_bits_4_shifted_2']
 metric_choices = ['lab', 'mscn', 'mscn_revisited', 'low_bits_2', 'low_bits_3', 'low_bits_4', 'low_bits_5', 'low_bits_6','low_bits_4_shifted_2']
+picture_step = 10
 
 
 def generate_data_svd(data_type, mode):
 def generate_data_svd(data_type, mode):
     """
     """
@@ -46,7 +48,7 @@ def generate_data_svd(data_type, mode):
     scenes = [s for s in scenes if min_max_filename not in s]
     scenes = [s for s in scenes if min_max_filename not in s]
 
 
     # keep in memory min and max data found from data_type
     # keep in memory min and max data found from data_type
-    min_val_found = 100000000000
+    min_val_found = sys.maxsize
     max_val_found = 0
     max_val_found = 0
 
 
     data_min_max_filename = os.path.join(path, data_type + min_max_filename)
     data_min_max_filename = os.path.join(path, data_type + min_max_filename)
@@ -95,167 +97,108 @@ def generate_data_svd(data_type, mode):
 
 
         while(current_counter_index <= end_counter_index):
         while(current_counter_index <= end_counter_index):
 
 
-            current_counter_index_str = str(current_counter_index)
+            if current_counter_index % picture_step == 0:
+                current_counter_index_str = str(current_counter_index)
 
 
-            while len(start_index_image) > len(current_counter_index_str):
-                current_counter_index_str = "0" + current_counter_index_str
+                while len(start_index_image) > len(current_counter_index_str):
+                    current_counter_index_str = "0" + current_counter_index_str
 
 
-            img_path = os.path.join(scene_path, prefix_image_name + current_counter_index_str + ".png")
+                img_path = os.path.join(scene_path, prefix_image_name + current_counter_index_str + ".png")
 
 
-            current_img = Image.open(img_path)
-            img_blocks = image_processing.divide_in_blocks(current_img, (200, 200))
+                current_img = Image.open(img_path)
+                img_blocks = image_processing.divide_in_blocks(current_img, (200, 200))
 
 
-            for id_block, block in enumerate(img_blocks):
+                for id_block, block in enumerate(img_blocks):
 
 
-                ###########################
-                # Metric computation part #
-                ###########################
+                    ###########################
+                    # Metric computation part #
+                    ###########################
 
 
-                # get data from mode
-                # Here you can add the way you compute data
-                if data_type == 'lab':
+                    data = get_svd_data(data_type, block)
 
 
-                    block_file_path = '/tmp/lab_img.png'
-                    block.save(block_file_path)
-                    data = image_processing.get_LAB_L_SVD_s(Image.open(block_file_path))
+                    ##################
+                    # Data mode part #
+                    ##################
 
 
-                if data_type == 'mscn_revisited':
+                    # modify data depending mode
+                    if mode == 'svdne':
 
 
-                    img_mscn_revisited = image_processing.rgb_to_mscn(block)
+                        # getting max and min information from min_max_filename
+                        with open(data_min_max_filename, 'r') as f:
+                            min_val = float(f.readline())
+                            max_val = float(f.readline())
 
 
-                    # save tmp as img
-                    img_output = Image.fromarray(img_mscn_revisited.astype('uint8'), 'L')
-                    mscn_revisited_file_path = '/tmp/mscn_revisited_img.png'
-                    img_output.save(mscn_revisited_file_path)
-                    img_block = Image.open(mscn_revisited_file_path)
+                        data = image_processing.normalize_arr_with_range(data, min_val, max_val)
 
 
-                    # extract from temp image
-                    data = metrics.get_SVD_s(img_block)
+                    if mode == 'svdn':
+                        data = image_processing.normalize_arr(data)
 
 
-                if data_type == 'mscn':
+                    # save min and max found from dataset in order to normalize data using whole data known
+                    if mode == 'svd':
 
 
-                    img_gray = np.array(color.rgb2gray(np.asarray(block))*255, 'uint8')
-                    img_mscn = image_processing.calculate_mscn_coefficients(img_gray, 7)
-                    img_mscn_norm = image_processing.normalize_2D_arr(img_mscn)
+                        current_min = data.min()
+                        current_max = data.max()
 
 
-                    img_mscn_gray = np.array(img_mscn_norm*255, 'uint8')
+                        if current_min < min_val_found:
+                            min_val_found = current_min
 
 
-                    data = metrics.get_SVD_s(img_mscn_gray)
+                        if current_max > max_val_found:
+                            max_val_found = current_max
 
 
-                if data_type == 'low_bits_6':
+                    # now write data into current writer
+                    current_file = svd_output_files[id_block]
 
 
-                    low_bits_6 = image_processing.rgb_to_LAB_L_low_bits(block, 63)
+                    # add of index
+                    current_file.write(current_counter_index_str + ';')
 
 
-                    # extract from temp image
-                    data = metrics.get_SVD_s(low_bits_6)
+                    for val in data:
+                        current_file.write(str(val) + ";")
 
 
-                if data_type == 'low_bits_5':
-
-                    low_bits_5 = image_processing.rgb_to_LAB_L_low_bits(block, 31)
-
-                    # extract from temp image
-                    data = metrics.get_SVD_s(low_bits_5)
-
-
-                if data_type == 'low_bits_4':
-
-                    low_bits_4 = image_processing.rgb_to_LAB_L_low_bits(block)
-
-                    # extract from temp image
-                    data = metrics.get_SVD_s(low_bits_4)
-
-                if data_type == 'low_bits_3':
-
-                    low_bits_3 = image_processing.rgb_to_LAB_L_low_bits(block, 7)
-
-                    # extract from temp image
-                    data = metrics.get_SVD_s(low_bits_3)
-
-                if data_type == 'low_bits_2':
-
-                    low_bits_2 = image_processing.rgb_to_LAB_L_low_bits(block, 3)
-
-                    # extract from temp image
-                    data = metrics.get_SVD_s(low_bits_2)
-
-                if data_type == 'low_bits_4_shifted_2':
-
-                    data = metrics.get_SVD_s(image_processing.rgb_to_LAB_L_bits(block, (3, 6)))
-
-
-                ##################
-                # Data mode part #
-                ##################
-
-                # modify data depending mode
-                if mode == 'svdne':
-
-                    # getting max and min information from min_max_filename
-                    with open(data_min_max_filename, 'r') as f:
-                        min_val = float(f.readline())
-                        max_val = float(f.readline())
-
-                    data = image_processing.normalize_arr_with_range(data, min_val, max_val)
-
-                if mode == 'svdn':
-                    data = image_processing.normalize_arr(data)
-
-                # save min and max found from dataset in order to normalize data using whole data known
-                if mode == 'svd':
-
-                    current_min = data.min()
-                    current_max = data.max()
-
-                    if current_min < min_val_found:
-                        min_val_found = current_min
-
-                    if current_max > max_val_found:
-                        max_val_found = current_max
-
-                # now write data into current writer
-                current_file = svd_output_files[id_block]
-
-                # add of index
-                current_file.write(current_counter_index_str + ';')
-
-                for val in data:
-                    current_file.write(str(val) + ";")
-
-                current_file.write('\n')
+                    current_file.write('\n')
 
 
             start_index_image_int = int(start_index_image)
             start_index_image_int = int(start_index_image)
             print(data_type + "_" + mode + "_" + folder_scene + " - " + "{0:.2f}".format((current_counter_index - start_index_image_int) / (end_counter_index - start_index_image_int)* 100.) + "%")
             print(data_type + "_" + mode + "_" + folder_scene + " - " + "{0:.2f}".format((current_counter_index - start_index_image_int) / (end_counter_index - start_index_image_int)* 100.) + "%")
+            sys.stdout.write("\033[F")
+
             current_counter_index += step_counter
             current_counter_index += step_counter
 
 
         for f in svd_output_files:
         for f in svd_output_files:
             f.close()
             f.close()
 
 
+        print('\n')
+
     # save current information about min file found
     # save current information about min file found
     if mode == 'svd':
     if mode == 'svd':
         with open(data_min_max_filename, 'w') as f:
         with open(data_min_max_filename, 'w') as f:
             f.write(str(min_val_found) + '\n')
             f.write(str(min_val_found) + '\n')
             f.write(str(max_val_found) + '\n')
             f.write(str(max_val_found) + '\n')
 
 
-    print("End of data generation")
+    print("%s : end of data generation\n" % _mode)
 
 
 
 
 def main():
 def main():
 
 
+    # default value of p_step
+    p_step = 10
+
     if len(sys.argv) <= 1:
     if len(sys.argv) <= 1:
         print('Run with default parameters...')
         print('Run with default parameters...')
         print('python generate_all_data.py --metric all')
         print('python generate_all_data.py --metric all')
         print('python generate_all_data.py --metric lab')
         print('python generate_all_data.py --metric lab')
+        print('python generate_all_data.py --metric lab --step 10')
         sys.exit(2)
         sys.exit(2)
     try:
     try:
-        opts, args = getopt.getopt(sys.argv[1:], "hm", ["help=", "metric="])
+        opts, args = getopt.getopt(sys.argv[1:], "hms", ["help=", "metric=", "step="])
     except getopt.GetoptError:
     except getopt.GetoptError:
         # print help information and exit:
         # print help information and exit:
-        print('python generate_all_data.py --metric all')
+        print('python generate_all_data.py --metric all --step 10')
         sys.exit(2)
         sys.exit(2)
     for o, a in opts:
     for o, a in opts:
         if o == "-h":
         if o == "-h":
-            print('python generate_all_data.py --metric all')
+            print('python generate_all_data.py --metric all --step 10')
             sys.exit()
             sys.exit()
+        elif o in ("-s", "--step"):
+            p_step = int(a)
         elif o in ("-m", "--metric"):
         elif o in ("-m", "--metric"):
             p_metric = a
             p_metric = a
 
 
@@ -264,6 +207,12 @@ def main():
         else:
         else:
             assert False, "unhandled option"
             assert False, "unhandled option"
 
 
+    global picture_step
+    picture_step = p_step
+
+    if picture_step % 10 != 0:
+        assert False, "Picture step variable needs to be divided by ten"
+
     # generate all or specific metric data
     # generate all or specific metric data
     if p_metric == 'all':
     if p_metric == 'all':
         for m in metric_choices:
         for m in metric_choices:

+ 2 - 2
generate_data_model_random.py

@@ -71,7 +71,7 @@ def generate_data_model(_filename, _interval, _choice, _metric, _scenes = scenes
     test_file = open(output_test_filename, 'w')
     test_file = open(output_test_filename, 'w')
 
 
     scenes = os.listdir(path)
     scenes = os.listdir(path)
-    
+
     # remove min max file from scenes folder
     # remove min max file from scenes folder
     scenes = [s for s in scenes if min_max_filename not in s]
     scenes = [s for s in scenes if min_max_filename not in s]
 
 
@@ -111,7 +111,7 @@ def generate_data_model(_filename, _interval, _choice, _metric, _scenes = scenes
                 line = construct_new_line(path_seuil, _interval, lines[index], _sep, _index)
                 line = construct_new_line(path_seuil, _interval, lines[index], _sep, _index)
 
 
                 percent = counter / num_lines
                 percent = counter / num_lines
-                
+
                 if id_zone < _nb_zones and folder_scene in _scenes and percent <= _percent:
                 if id_zone < _nb_zones and folder_scene in _scenes and percent <= _percent:
                     train_file.write(line)
                     train_file.write(line)
                 else:
                 else:

+ 87 - 11
generate_data_model_random_maxwell.py

@@ -31,13 +31,21 @@ path = './fichiersSVD_light'
 zones = np.arange(16)
 zones = np.arange(16)
 seuil_expe_filename = 'seuilExpe'
 seuil_expe_filename = 'seuilExpe'
 
 
-def construct_new_line(path_seuil, interval, line, sep, index):
+min_value_interval = sys.maxsize
+max_value_interval = 0
+
+def construct_new_line(path_seuil, interval, line, norm, sep, index):
     begin, end = interval
     begin, end = interval
 
 
     line_data = line.split(';')
     line_data = line.split(';')
     seuil = line_data[0]
     seuil = line_data[0]
     metrics = line_data[begin+1:end+1]
     metrics = line_data[begin+1:end+1]
 
 
+    metrics = [float(m) for m in metrics]
+
+    if norm:
+        metrics = image_processing.normalize_arr_with_range(metrics, min_value_interval, max_value_interval)
+
     with open(path_seuil, "r") as seuil_file:
     with open(path_seuil, "r") as seuil_file:
         seuil_learned = int(seuil_file.readline().strip())
         seuil_learned = int(seuil_file.readline().strip())
 
 
@@ -50,12 +58,71 @@ def construct_new_line(path_seuil, interval, line, sep, index):
         if index:
         if index:
             line += " " + str(idx + 1)
             line += " " + str(idx + 1)
         line += sep
         line += sep
-        line += val
+        line += str(val)
     line += '\n'
     line += '\n'
 
 
     return line
     return line
 
 
-def generate_data_model(_filename, _interval, _choice, _metric, _scenes = scenes_list, _nb_zones = 4, _percent = 1, _sep=':', _index=True):
+def get_min_max_value_interval(_filename, _interval, _choice, _metric, _scenes = scenes_list, _nb_zones = 4, _percent = 1):
+
+    global min_value_interval, max_value_interval
+
+    scenes = os.listdir(path)
+
+    # remove min max file from scenes folder
+    scenes = [s for s in scenes if min_max_filename not in s]
+
+    for id_scene, folder_scene in enumerate(scenes):
+
+        # only take care of maxwell scenes
+        if folder_scene in scenes_list:
+
+            scene_path = os.path.join(path, folder_scene)
+
+            zones_folder = []
+            # create zones list
+            for index in zones:
+                index_str = str(index)
+                if len(index_str) < 2:
+                    index_str = "0" + index_str
+                zones_folder.append("zone"+index_str)
+
+            # shuffle list of zones (=> randomly choose zones)
+            random.shuffle(zones_folder)
+
+            for id_zone, zone_folder in enumerate(zones_folder):
+                zone_path = os.path.join(scene_path, zone_folder)
+                data_filename = _metric + "_" + _choice + generic_output_file_svd
+                data_file_path = os.path.join(zone_path, data_filename)
+
+                # getting number of line and read randomly lines
+                f = open(data_file_path)
+                lines = f.readlines()
+
+                counter = 0
+                # check if user select current scene and zone to be part of training data set
+                for line in lines:
+
+
+                    begin, end = _interval
+
+                    line_data = line.split(';')
+                    metrics = line_data[begin+1:end+1]
+                    metrics = [float(m) for m in metrics]
+
+                    min_value = min(metrics)
+                    max_value = max(metrics)
+
+                    if min_value < min_value_interval:
+                        min_value_interval = min_value
+
+                    if max_value > max_value_interval:
+                        max_value_interval = max_value
+
+                    counter += 1
+
+
+def generate_data_model(_filename, _interval, _choice, _metric, _scenes = scenes_list, _nb_zones = 4, _percent = 1, _norm = False, _sep=':', _index=True):
 
 
     output_train_filename = _filename + ".train"
     output_train_filename = _filename + ".train"
     output_test_filename = _filename + ".test"
     output_test_filename = _filename + ".test"
@@ -71,7 +138,7 @@ def generate_data_model(_filename, _interval, _choice, _metric, _scenes = scenes
     test_file = open(output_test_filename, 'w')
     test_file = open(output_test_filename, 'w')
 
 
     scenes = os.listdir(path)
     scenes = os.listdir(path)
-    
+
     # remove min max file from scenes folder
     # remove min max file from scenes folder
     scenes = [s for s in scenes if min_max_filename not in s]
     scenes = [s for s in scenes if min_max_filename not in s]
 
 
@@ -112,10 +179,10 @@ def generate_data_model(_filename, _interval, _choice, _metric, _scenes = scenes
                 counter = 0
                 counter = 0
                 # check if user select current scene and zone to be part of training data set
                 # check if user select current scene and zone to be part of training data set
                 for index in lines_indexes:
                 for index in lines_indexes:
-                    line = construct_new_line(path_seuil, _interval, lines[index], _sep, _index)
+                    line = construct_new_line(path_seuil, _interval, lines[index], _norm, _sep, _index)
 
 
                     percent = counter / num_lines
                     percent = counter / num_lines
-                    
+
                     if id_zone < _nb_zones and folder_scene in _scenes and percent <= _percent:
                     if id_zone < _nb_zones and folder_scene in _scenes and percent <= _percent:
                         train_file.write(line)
                         train_file.write(line)
                     else:
                     else:
@@ -133,17 +200,17 @@ def main():
 
 
     if len(sys.argv) <= 1:
     if len(sys.argv) <= 1:
         print('Run with default parameters...')
         print('Run with default parameters...')
-        print('python generate_data_model_random.py --output xxxx --interval 0,20  --kind svdne --metric lab --scenes "A, B, D" --nb_zones 5 --percent 0.7 --sep : --rowindex 1')
+        print('python generate_data_model_random.py --output xxxx --interval 0,20  --kind svdne --metric lab --scenes "A, B, D" --nb_zones 5 --percent 0.7 --norm 1 --sep : --rowindex 1')
         sys.exit(2)
         sys.exit(2)
     try:
     try:
-        opts, args = getopt.getopt(sys.argv[1:], "ho:i:k:s:n:p:r", ["help=", "output=", "interval=", "kind=", "metric=","scenes=", "nb_zones=", "percent=", "sep=", "rowindex="])
+        opts, args = getopt.getopt(sys.argv[1:], "ho:i:k:s:n:p:r", ["help=", "output=", "interval=", "kind=", "metric=","scenes=", "nb_zones=", "percent=", "norm=", "sep=", "rowindex="])
     except getopt.GetoptError:
     except getopt.GetoptError:
         # print help information and exit:
         # print help information and exit:
-        print('python generate_data_model_random.py --output xxxx --interval 0,20  --kind svdne --metric lab --scenes "A, B, D" --nb_zones 5 --percent 0.7 --sep : --rowindex 1')
+        print('python generate_data_model_random.py --output xxxx --interval 0,20  --kind svdne --metric lab --scenes "A, B, D" --nb_zones 5 --percent 0.7 --norm 1 --sep : --rowindex 1')
         sys.exit(2)
         sys.exit(2)
     for o, a in opts:
     for o, a in opts:
         if o == "-h":
         if o == "-h":
-            print('python generate_data_model_random.py --output xxxx --interval 0,20  --kind svdne --metric lab --scenes "A, B, D" --nb_zones 5 --percent 0.7 --sep : --rowindex 1')
+            print('python generate_data_model_random.py --output xxxx --interval 0,20  --kind svdne --metric lab --scenes "A, B, D" --nb_zones 5 --percent 0.7 --norm 1 --sep : --rowindex 1')
             sys.exit()
             sys.exit()
         elif o in ("-o", "--output"):
         elif o in ("-o", "--output"):
             p_filename = a
             p_filename = a
@@ -157,6 +224,11 @@ def main():
             p_scenes = a.split(',')
             p_scenes = a.split(',')
         elif o in ("-n", "--nb_zones"):
         elif o in ("-n", "--nb_zones"):
             p_nb_zones = int(a)
             p_nb_zones = int(a)
+        elif o in ("-n", "--norm"):
+            if int(a) == 1:
+                p_norm = True
+            else:
+                p_norm = False
         elif o in ("-p", "--percent"):
         elif o in ("-p", "--percent"):
             p_percent = float(a)
             p_percent = float(a)
         elif o in ("-s", "--sep"):
         elif o in ("-s", "--sep"):
@@ -176,8 +248,12 @@ def main():
         index = scenes_indexes.index(scene_id.strip())
         index = scenes_indexes.index(scene_id.strip())
         scenes_selected.append(scenes_list[index])
         scenes_selected.append(scenes_list[index])
 
 
+    # find min max value if necessary to renormalize data
+    if p_norm:
+        get_min_max_value_interval(p_filename, p_interval, p_kind, p_metric, scenes_selected, p_nb_zones, p_percent)
+
     # create database using img folder (generate first time only)
     # create database using img folder (generate first time only)
-    generate_data_model(p_filename, p_interval, p_kind, p_metric, scenes_selected, p_nb_zones, p_percent, p_sep, p_rowindex)
+    generate_data_model(p_filename, p_interval, p_kind, p_metric, scenes_selected, p_nb_zones, p_percent, p_norm, p_sep, p_rowindex)
 
 
 if __name__== "__main__":
 if __name__== "__main__":
     main()
     main()

+ 0 - 241
helpful_scripts/test_mscn.py

@@ -1,241 +0,0 @@
-from ipfml import image_processing
-from PIL import Image
-import numpy as np
-from ipfml import metrics
-from skimage import color
-
-import cv2
-
-low_bits_svd_values_norm = []
-low_bits_svd_values_norm_together = []
-low_bits_svd_values = []
-
-mscn_svd_values_norm = []
-mscn_svd_values_norm_together = []
-mscn_svd_values = []
-
-lab_svd_values_norm = []
-lab_svd_values_norm_together = []
-lab_svd_values = []
-
-def open_and_display(path):
-    img = Image.open(path)
-
-    blocks = image_processing.divide_in_blocks(img, (200, 200), False)
-
-    block_used = blocks[11]
-
-    img_mscn = image_processing.rgb_to_mscn(block_used)
-
-    #img_mscn_norm = image_processing.normalize_2D_arr(img_mscn)
-
-    #print(img_mscn)
-    img_output = img_mscn.astype('uint8')
-
-    print('-------------------------')
-
-    # MSCN part computation
-    mscn_s = metrics.get_SVD_s(img_output)
-
-    mscn_svd_values.append(mscn_s)
-    mscn_svd_values_norm.append(image_processing.normalize_arr(mscn_s))
-
-    mscn_min_val = 10000000
-    mscn_max_val = 0
-
-     # check for each block of image
-    for block in blocks:
-
-        current_img_mscn = image_processing.rgb_to_mscn(block)
-
-        current_img_output = img_mscn.astype('uint8')
-
-        # MSCN part computation
-        current_mscn_s = metrics.get_SVD_s(img_output)
-
-        current_min = current_mscn_s.min()
-        current_max = current_mscn_s.max()
-
-        if current_min < mscn_min_val:
-            mscn_min_val = current_min
-
-        if current_max > mscn_max_val:
-            mscn_max_val = current_max
-
-    mscn_svd_values_norm_together.append(image_processing.normalize_arr_with_range(mscn_s, mscn_min_val, mscn_max_val))
-
-    # LAB part computation
-    path_block_img = '/tmp/lab_img.png'
-
-    img_used_pil = Image.fromarray(block_used.astype('uint8'), 'RGB')
-    img_used_pil.save(path_block_img)
-
-    #img_used_pil.show()
-
-    lab_s = image_processing.get_LAB_L_SVD_s(Image.open(path_block_img))
-
-    lab_svd_values.append(lab_s)
-    lab_svd_values_norm.append(image_processing.normalize_arr(lab_s))
-
-    lab_min_val = 10000000
-    lab_max_val = 0
-
-    # check for each block of image
-    for block in blocks:
-
-        current_img_used_pil = Image.fromarray(block.astype('uint8'), 'RGB')
-        current_img_used_pil.save(path_block_img)
-
-        current_lab_s = image_processing.get_LAB_L_SVD_s(Image.open(path_block_img))
-
-        current_min = current_lab_s.min()
-        current_max = current_lab_s.max()
-
-        if current_min < lab_min_val:
-            lab_min_val = current_min
-
-        if current_max > lab_max_val:
-            lab_max_val = current_max
-
-    lab_svd_values_norm_together.append(image_processing.normalize_arr_with_range(lab_s, lab_min_val, lab_max_val))
-
-    # computation of low bits parts
-    low_bits_block = image_processing.rgb_to_grey_low_bits(block_used)
-
-    low_bits_svd = metrics.get_SVD_s(low_bits_block)
-
-    low_bits_svd_values.append(low_bits_svd)
-    low_bits_svd_values_norm.append(image_processing.normalize_arr(low_bits_svd))
-
-    low_bits_min_val = 10000000
-    low_bits_max_val = 0
-
-
-        # check for each block of image
-    for block in blocks:
-
-        current_grey_block = np.array(color.rgb2gray(block)*255, 'uint8')
-        current_low_bit_block = current_grey_block & 15
-        current_low_bits_svd = metrics.get_SVD_s(current_low_bit_block)
-
-        current_min = current_low_bits_svd.min()
-        current_max = current_low_bits_svd.max()
-
-        if current_min < low_bits_min_val:
-            low_bits_min_val = current_min
-
-        if current_max > low_bits_max_val:
-            low_bits_max_val = current_max
-
-    low_bits_svd_values_norm_together.append(image_processing.normalize_arr_with_range(low_bits_svd, low_bits_min_val, low_bits_max_val))
-
-    # Other MSCN
-    img_grey = np.array(color.rgb2gray(np.asarray(block_used))*255, 'uint8')
-
-
-    img_mscn_in_grey = np.array(image_processing.normalize_2D_arr(image_processing.calculate_mscn_coefficients(img_grey, 7))*255, 'uint8')
-    svd_s_values = metrics.get_SVD_s(img_mscn_in_grey)
-    #print(svd_s_values[0:10])
-
-    img_mscn_pil = Image.fromarray(img_mscn_in_grey.astype('uint8'), 'L')
-    #img_mscn_pil.show()
-
-
-
-
-#path_noisy = '/home/jbuisine/Documents/Thesis/Development/NoiseDetection_In_SynthesisImages/fichiersSVD_light/Appart1opt02/appartAopt_00020.png'
-#path_threshold = '/home/jbuisine/Documents/Thesis/Development/NoiseDetection_In_SynthesisImages/fichiersSVD_light/Appart1opt02/appartAopt_00300.png'
-#path_ref = '/home/jbuisine/Documents/Thesis/Development/NoiseDetection_In_SynthesisImages/fichiersSVD_light/Appart1opt02/appartAopt_00900.png'
-
-path_noisy = '/home/jbuisine/Documents/Thesis/Development/NoiseDetection_In_SynthesisImages/fichiersSVD_light/Cuisine01/cuisine01_00050.png'
-path_threshold = '/home/jbuisine/Documents/Thesis/Development/NoiseDetection_In_SynthesisImages/fichiersSVD_light/Cuisine01/cuisine01_00400.png'
-path_ref = '/home/jbuisine/Documents/Thesis/Development/NoiseDetection_In_SynthesisImages/fichiersSVD_light/Cuisine01/cuisine01_01200.png'
-
-
-path_list = [path_noisy, path_threshold, path_ref]
-
-for p in path_list:
-    open_and_display(p)
-
-import matplotlib.pyplot as plt
-
-# SVD
-fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
-# make a little extra space between the subplots
-fig.subplots_adjust(hspace=0.5)
-
-ax1.plot(lab_svd_values[0], label='Noisy')
-ax1.plot(lab_svd_values[1], label='Threshold')
-ax1.plot(lab_svd_values[2], label='Reference')
-ax1.set_ylabel('LAB SVD comparisons')
-ax1.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-ax2.plot(mscn_svd_values[0], label='Noisy')
-ax2.plot(mscn_svd_values[1], label='Threshold')
-ax2.plot(mscn_svd_values[2], label='Reference')
-ax2.set_ylabel('MSCN SVD comparisons')
-ax2.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-ax3.plot(low_bits_svd_values[0], label='Noisy')
-ax3.plot(low_bits_svd_values[1], label='Threshold')
-ax3.plot(low_bits_svd_values[2], label='Reference')
-ax3.set_ylabel('Low bits SVD comparisons')
-ax3.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-plt.show()
-
-# SVDN
-
-fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
-# make a little extra space between the subplots
-fig.subplots_adjust(hspace=0.5)
-
-ax1.plot(lab_svd_values_norm[0], label='Noisy')
-ax1.plot(lab_svd_values_norm[1], label='Threshold')
-ax1.plot(lab_svd_values_norm[2], label='Reference')
-ax1.set_ylabel('LAB SVDN comparisons')
-ax1.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-ax2.plot(mscn_svd_values_norm[0], label='Noisy')
-ax2.plot(mscn_svd_values_norm[1], label='Threshold')
-ax2.plot(mscn_svd_values_norm[2], label='Reference')
-ax2.set_ylabel('MSCN SVDN comparisons')
-ax2.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-ax3.plot(low_bits_svd_values_norm[0], label='Noisy')
-ax3.plot(low_bits_svd_values_norm[1], label='Threshold')
-ax3.plot(low_bits_svd_values_norm[2], label='Reference')
-ax3.set_ylabel('Low bits SVD comparisons')
-ax3.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-plt.show()
-
-# SVDNE
-fig, (ax1, ax2, ax3) = plt.subplots(3, 1)
-# make a little extra space between the subplots
-fig.subplots_adjust(hspace=0.5)
-
-ax1.plot(lab_svd_values_norm_together[0], label='Noisy')
-ax1.plot(lab_svd_values_norm_together[1], label='Threshold')
-ax1.plot(lab_svd_values_norm_together[2], label='Reference')
-ax1.set_ylabel('LAB SVDNE comparisons')
-ax1.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-ax2.plot(mscn_svd_values_norm_together[0], label='Noisy')
-ax2.plot(mscn_svd_values_norm_together[1], label='Threshold')
-ax2.plot(mscn_svd_values_norm_together[2], label='Reference')
-ax2.set_ylabel('MSCN SVDNE comparisons')
-ax2.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-
-ax3.plot(low_bits_svd_values_norm_together[0], label='Noisy')
-ax3.plot(low_bits_svd_values_norm_together[1], label='Threshold')
-ax3.plot(low_bits_svd_values_norm_together[2], label='Reference')
-ax3.set_ylabel('Low bits SVD comparisons')
-ax3.legend(bbox_to_anchor=(0.7, 1), loc=2, borderaxespad=0.2)
-plt.show()
-
-
-#print(mscn_svd_values[0][0:3])
-#print(mscn_svd_values[1][0:3])
-#print(mscn_svd_values[2][0:3])
-

helpful_scripts/Curve_simulations/simulation_curves_zones_Appart1opt02 → utils/Curve_simulations/simulation_curves_zones_Appart1opt02


helpful_scripts/Curve_simulations/simulation_curves_zones_Cuisine01 → utils/Curve_simulations/simulation_curves_zones_Cuisine01


helpful_scripts/Curve_simulations/simulation_curves_zones_SdbCentre → utils/Curve_simulations/simulation_curves_zones_SdbCentre


helpful_scripts/Curve_simulations/simulation_curves_zones_SdbDroite → utils/Curve_simulations/simulation_curves_zones_SdbDroite


+ 0 - 0
utils/__init__.py


+ 83 - 0
utils/data_type_module.py

@@ -0,0 +1,83 @@
+from ipfml import image_processing, metrics
+from PIL import Image
+
+import numpy as np
+
+
+def get_svd_data(data_type, block):
+    """
+    Method which returns the data type expected
+    """
+
+    if data_type == 'lab':
+
+        block_file_path = '/tmp/lab_img.png'
+        block.save(block_file_path)
+        data = image_processing.get_LAB_L_SVD_s(Image.open(block_file_path))
+
+    if data_type == 'mscn_revisited':
+
+        img_mscn_revisited = image_processing.rgb_to_mscn(block)
+
+        # save tmp as img
+        img_output = Image.fromarray(img_mscn_revisited.astype('uint8'), 'L')
+        mscn_revisited_file_path = '/tmp/mscn_revisited_img.png'
+        img_output.save(mscn_revisited_file_path)
+        img_block = Image.open(mscn_revisited_file_path)
+
+        # extract from temp image
+        data = metrics.get_SVD_s(img_block)
+
+    if data_type == 'mscn':
+
+        img_gray = np.array(color.rgb2gray(np.asarray(block))*255, 'uint8')
+        img_mscn = image_processing.calculate_mscn_coefficients(img_gray, 7)
+        img_mscn_norm = image_processing.normalize_2D_arr(img_mscn)
+
+        img_mscn_gray = np.array(img_mscn_norm*255, 'uint8')
+
+        data = metrics.get_SVD_s(img_mscn_gray)
+
+    if data_type == 'low_bits_6':
+
+        low_bits_6 = image_processing.rgb_to_LAB_L_low_bits(block, 63)
+
+        # extract from temp image
+        data = metrics.get_SVD_s(low_bits_6)
+
+    if data_type == 'low_bits_5':
+
+        low_bits_5 = image_processing.rgb_to_LAB_L_low_bits(block, 31)
+
+        # extract from temp image
+        data = metrics.get_SVD_s(low_bits_5)
+
+
+    if data_type == 'low_bits_4':
+
+        low_bits_4 = image_processing.rgb_to_LAB_L_low_bits(block)
+
+        # extract from temp image
+        data = metrics.get_SVD_s(low_bits_4)
+
+    if data_type == 'low_bits_3':
+
+        low_bits_3 = image_processing.rgb_to_LAB_L_low_bits(block, 7)
+
+        # extract from temp image
+        data = metrics.get_SVD_s(low_bits_3)
+
+    if data_type == 'low_bits_2':
+
+        low_bits_2 = image_processing.rgb_to_LAB_L_low_bits(block, 3)
+
+        # extract from temp image
+        data = metrics.get_SVD_s(low_bits_2)
+
+    if data_type == 'low_bits_4_shifted_2':
+
+        data = metrics.get_SVD_s(image_processing.rgb_to_LAB_L_bits(block, (3, 6)))
+
+    return data
+
+

helpful_scripts/display_bits_shifted.py → utils/display_bits_shifted.py


helpful_scripts/display_bits_shifted_scene.py → utils/display_bits_shifted_scene.py


helpful_scripts/display_bits_values.py → utils/display_bits_values.py


helpful_scripts/display_scenes_zones.py → utils/display_scenes_zones.py


helpful_scripts/display_scenes_zones_shifted.py → utils/display_scenes_zones_shifted.py


helpful_scripts/display_svd_values.py → utils/display_svd_values.py


helpful_scripts/show_mscn.py → utils/show_mscn.py


helpful_scripts/show_simulation_curves.py → utils/show_simulation_curves.py