
Update of normalization data for simulation

Jérôme BUISINE, 5 years ago
Parent commit: eba30149e1
2 files changed, with 22 additions and 31 deletions
  1. generate_data_model_random.py (+2, -2)
  2. predict_noisy_image_svd.py (+20, -29)

generate_data_model_random.py (+2, -2)

@@ -201,7 +201,7 @@ def generate_data_model(_scenes_list, _filename, _interval, _choice, _metric, _s
                     image_index = int(data.split(';')[0])
 
                     if image_index % _step == 0:
-                        line = construct_new_line(path_seuil, _interval, data, _choice, _norm, _sep, _index)
+                        line = construct_new_line(path_seuil, _interval, data, _choice, _norm)
 
                         if id_zone < _nb_zones and folder_scene in _scenes and percent <= _percent:
                             train_file_data.append(line)
@@ -219,7 +219,7 @@ def generate_data_model(_scenes_list, _filename, _interval, _choice, _metric, _s
         train_file.write(line)
 
     for line in test_file_data:
-        test_file_data.write(line)
+        test_file.write(line)
 
     train_file.close()
     test_file.close()
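
For context, the second hunk of this file fixes a call on the wrong object: lines collected in the test_file_data list were being "written" to the list itself (a list has no write method) instead of to the open test_file handle. A minimal sketch of the corrected write-out, with hypothetical output file names and placeholder line data:

    # Minimal sketch of the corrected write-out (hypothetical file names, placeholder lines)
    train_file_data = ["train line 1\n", "train line 2\n"]
    test_file_data = ["test line 1\n"]

    with open("data_train.txt", "w") as train_file, open("data_test.txt", "w") as test_file:
        for line in train_file_data:
            train_file.write(line)
        for line in test_file_data:
            # previous bug: test_file_data.write(line) raised AttributeError
            test_file.write(line)
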

predict_noisy_image_svd.py (+20, -29)

@@ -69,37 +69,33 @@ def main():
     begin, end = p_interval
 
     # check mode to normalize data
+    if p_mode == 'svdne':
 
-    if p_custom:
-
-        data = data[begin:end]
+        # set min_max_filename if custom use
+        min_max_file_path = path + '/' + p_metric + min_max_ext
 
-        if p_mode == 'svdne':
+        # need to read min_max_file
+        file_path = os.path.join(os.path.dirname(__file__), min_max_file_path)
+        with open(file_path, 'r') as f:
+            min_val = float(f.readline().replace('\n', ''))
+            max_val = float(f.readline().replace('\n', ''))
 
-            # set min_max_filename if custom use
-            min_max_file_path = custom_min_max_folder + '/' +  p_custom
+        l_values = processing.normalize_arr_with_range(data, min_val, max_val)
 
-            # need to read min_max_file
-            file_path = os.path.join(os.path.dirname(__file__), min_max_file_path)
-            with open(file_path, 'r') as f:
-                min_val = float(f.readline().replace('\n', ''))
-                max_val = float(f.readline().replace('\n', ''))
-
-            l_values = processing.normalize_arr_with_range(data, min_val, max_val)
-
-        elif p_mode == 'svdn':
-            l_values = processing.normalize_arr(data)
-        else:
-            l_values = data
+    elif p_mode == 'svdn':
+        l_values = processing.normalize_arr(data)
+    else:
+        l_values = data
 
-        test_data = l_values
+    test_data = l_values[begin:end]
 
-    else:
+    # check if custom min max file is used
+    if p_custom:
 
         if p_mode == 'svdne':
 
             # set min_max_filename if custom use
-            min_max_file_path = path + '/' + p_metric + min_max_ext
+            min_max_file_path = custom_min_max_folder + '/' +  p_custom
 
             # need to read min_max_file
             file_path = os.path.join(os.path.dirname(__file__), min_max_file_path)
@@ -107,16 +103,11 @@ def main():
                 min_val = float(f.readline().replace('\n', ''))
                 max_val = float(f.readline().replace('\n', ''))
 
-            l_values = processing.normalize_arr_with_range(data, min_val, max_val)
-
-        elif p_mode == 'svdn':
-            l_values = processing.normalize_arr(data)
-        else:
-            l_values = data
+            test_data = processing.normalize_arr_with_range(test_data, min_val, max_val)
 
-        test_data = l_values[begin:end]
+        if p_mode == 'svdn':
+            test_data = processing.normalize_arr(test_data)
 
-    print(data)
 
     # get prediction of model
     prediction = model.predict([test_data])[0]
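
To summarize the refactor above: the full SVD feature vector is now normalized once according to p_mode, then sliced to the [begin:end] interval, and only afterwards optionally re-normalized against the custom min/max values when p_custom is set. The sketch below reproduces that order of operations in isolation; the two helpers are stand-ins assuming processing.normalize_arr and processing.normalize_arr_with_range perform ordinary min-max scaling (an assumption about the processing module, not its actual code):

    import numpy as np

    def normalize_arr(arr):
        # stand-in: scale to [0, 1] using the array's own min/max (assumption)
        arr = np.asarray(arr, dtype=float)
        return (arr - arr.min()) / (arr.max() - arr.min())

    def normalize_arr_with_range(arr, min_val, max_val):
        # stand-in: scale to [0, 1] against externally supplied bounds (assumption)
        arr = np.asarray(arr, dtype=float)
        return (arr - min_val) / (max_val - min_val)

    def prepare_test_data(data, begin, end, p_mode, global_bounds=None, custom_bounds=None):
        # 1) normalize the whole feature vector according to the chosen mode
        if p_mode == 'svdne' and global_bounds is not None:
            l_values = normalize_arr_with_range(data, *global_bounds)
        elif p_mode == 'svdn':
            l_values = normalize_arr(data)
        else:
            l_values = np.asarray(data, dtype=float)

        # 2) keep only the requested interval
        test_data = np.asarray(l_values)[begin:end]

        # 3) optional second pass with the custom min/max values (p_custom)
        if custom_bounds is not None:
            if p_mode == 'svdne':
                test_data = normalize_arr_with_range(test_data, *custom_bounds)
            if p_mode == 'svdn':
                test_data = normalize_arr(test_data)

        return test_data
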