Merge branch 'release/v0.2.1'

Jérôme BUISINE 5 years ago
parent commit 4b3e3b975e

+ 131 - 40
analysis/svd_entropy_analysis.ipynb

@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 154,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -17,7 +17,8 @@
     "import cv2\n",
     "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
-    "import os"
+    "import os\n",
+    "import math"
    ]
   },
   {
@@ -89,7 +90,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 16,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -124,7 +125,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 17,
+   "execution_count": 151,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -137,6 +138,51 @@
     "    return np.array(arr).argsort()[::-1][-n:][::-1]"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 168,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def get_entropy(arr):\n",
+    "    arr = np.array(arr)\n",
+    "    eigen_values = []\n",
+    "    sum_eigen_values = (arr * arr).sum()\n",
+    "    print(sum_eigen_values)\n",
+    "\n",
+    "    for id, val in enumerate(arr):\n",
+    "        eigen_values.append(val * val)\n",
+    "        #print(id, \" : \", val)\n",
+    "\n",
+    "    v = []\n",
+    "\n",
+    "    for val in eigen_values:\n",
+    "        v.append(val / sum_eigen_values)\n",
+    "\n",
+    "    entropy = 0\n",
+    "\n",
+    "    for val in v:\n",
+    "        if val > 0:\n",
+    "            entropy += val * math.log(val)\n",
+    "\n",
+    "    entropy *= -1\n",
+    "\n",
+    "    entropy /= math.log(len(v))\n",
+    "    \n",
+    "    return entropy\n",
+    "\n",
+    "\n",
+    "def get_entropy_without_i(arr, i):\n",
+    "    \n",
+    "    arr = np.array([v for index, v in enumerate(arr) if index != i])\n",
+    "\n",
+    "    return get_entropy(arr)\n",
+    "\n",
+    "def get_entropy_contribution_of_i(arr, i):\n",
+    "\n",
+    "    return get_entropy(arr) - get_entropy_without_i(arr, i)"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
@@ -146,7 +192,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 7,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -177,26 +223,26 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [],
    "source": [
-    "current_dict = dict_sdb_d\n",
+    "current_dict = dict_appart\n",
     "interval = (30, 200)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 9,
    "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "../fichiersSVD_light/SdbDroite/SdB2_D_00020.png\n",
-      "../fichiersSVD_light/SdbDroite/SdB2_D_00400.png\n",
-      "../fichiersSVD_light/SdbDroite/SdB2_D_00950.png\n"
+      "../fichiersSVD_light/Appart1opt02/appartAopt_00020.png\n",
+      "../fichiersSVD_light/Appart1opt02/appartAopt_00200.png\n",
+      "../fichiersSVD_light/Appart1opt02/appartAopt_00900.png\n"
      ]
     }
    ],
@@ -207,53 +253,71 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 169,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1277393.7121246634\n",
+      "1277393.7121246634\n",
+      "0 :  0.7291941465931915\n"
+     ]
+    }
+   ],
    "source": [
-    "first_image = zones_data[0][1]\n",
+    "first_image = zones_data[0][0]\n",
+    "# first_image = metrics.get_LAB_L(first_image)\n",
+    "\n",
+    "# print(first_image[0:2, 0:2])\n",
+    "# Image.fromarray(first_image).show()\n",
+    "\n",
+    "# first_image = np.asarray(Image.fromarray(first_image).convert('L'))\n",
+    "#first_image.show()\n",
+    "\n",
     "entropy_contribution_data = []\n",
     "\n",
-    "sv = processing.get_LAB_L_SVD_s(zone)\n",
-    "sv = utils.normalize_arr(sv)\n",
-    "entropy = utils.get_entropy(sv)\n",
+    "sv = processing.get_LAB_L_SVD_s(first_image)\n",
+    "# sv = utils.normalize_arr(sv)\n",
+    "#entropy = get_entropy(sv)\n",
     "\n",
-    "for i in range(200):\n",
-    "    entropy_without_column = utils.get_entropy_without_i(sv, i)\n",
-    "    entropy_contribution_column = entropy - entropy_without_column\n",
-    "    entropy_contribution_data.append(entropy_contribution_column)"
+    "#for i in range(200):\n",
+    "entropy_contribution_data.append(get_entropy_without_i(sv, 0))\n",
+    "print(0, \": \", get_entropy_without_i(sv, 0))"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 29,
+   "execution_count": 148,
    "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "array([  1,   2,   3,   4,   5,   6,   7,   8,   9,  10,  11,  12,  13,\n",
-       "        14,  15,  16,  17,  18,  19,  20,  21,  22,  23,  24,  25,  26,\n",
-       "        27,  28,  29,  30,  31,  32,  33,  34,  35,  36,  37,  38,  39,\n",
-       "        40,  41,  42,  43,  44,  45,  46,  47,  48,  49,  50,  51,  52,\n",
-       "        53,  54,  55,  56,  57,  58,  59,  60,  61,  62,  63,  64,  65,\n",
-       "        66,  67,  68,  69,  70,  71,  72,  73,  74,  75,  76,  77,  78,\n",
-       "        79,  80,  81,  82,  83,  84,  85,  86,  87,  88,  89,  90,  91,\n",
-       "        92,  93,  94,  95,  96,  97,  98,  99, 100])"
-      ]
-     },
-     "execution_count": 29,
-     "metadata": {},
-     "output_type": "execute_result"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[[[  0   0   0]\n",
+      "  [  0   0   0]]\n",
+      "\n",
+      " [[164 152 143]\n",
+      "  [159 144 132]]]\n",
+      "[87.9761409  0.       ]\n"
+     ]
     }
    ],
    "source": [
-    "get_highest_values(entropy_contribution_data, 100)"
+    "sub_blocks = processing.divide_in_blocks(first_image, (2,2))\n",
+    "sub_block = np.asarray(sub_blocks[0])\n",
+    "sub_block\n",
+    "\n",
+    "sv_values = processing.get_LAB_L_SVD_s(sub_block)\n",
+    "print(sub_block)\n",
+    "print(sv_values)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 30,
+   "execution_count": 50,
    "metadata": {},
    "outputs": [
     {
@@ -269,7 +333,34 @@
        "       109, 108, 107, 106, 105, 104, 103, 102, 101])"
       ]
      },
-     "execution_count": 30,
+     "execution_count": 50,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "get_highest_values(entropy_contribution_data, 100)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 51,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "array([  1,   2,   3,   4,   5,   6,   7,   8,   9,  10,  11,  12,  13,\n",
+       "        14,  15,  16,  17,  18,  19,  20,  21,  22,  23,  24,  25,  26,\n",
+       "        27,  28,  29,  30,  31,  32,  33,  34,  35,  36,  37,  38,  39,\n",
+       "        40,  41,  42,  43,  44,  45,  46,  47,  48,  49,  50,  51,  52,\n",
+       "        53,  54,  55,  56,  57,  58,  59,  60,  61,  62,  63,  64,  65,\n",
+       "        66,  67,  68,  69,  70,  71,  72,  73,  74,  75,  76,  77,  78,\n",
+       "        79,  80,  81,  82,  83,  84,  85,  86,  87,  88,  89,  90,  91,\n",
+       "        92,  93,  94,  95,  96,  97,  98,  99, 100])"
+      ]
+     },
+     "execution_count": 51,
      "metadata": {},
      "output_type": "execute_result"
     }

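The new notebook cell above computes a normalized entropy over the squared singular values, plus leave-one-out variants of it. A minimal vectorized sketch of the same computation, assuming only NumPy; the function names here are illustrative and not part of the repository's utils module:

import numpy as np

def svd_entropy(singular_values):
    # Normalized entropy of the squared singular values, as in get_entropy:
    # p_i = s_i**2 / sum_j s_j**2, H = -sum(p_i * log p_i) / log(n).
    sv = np.asarray(singular_values, dtype=float)
    p = sv ** 2 / (sv ** 2).sum()
    p = p[p > 0]  # skip zero components, exactly as the loop version does
    return -(p * np.log(p)).sum() / np.log(len(sv))

def svd_entropy_contribution(singular_values, i):
    # Leave-one-out contribution of component i
    # (cf. get_entropy_contribution_of_i in the cell above).
    rest = np.delete(np.asarray(singular_values, dtype=float), i)
    return svd_entropy(singular_values) - svd_entropy(rest)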
File diff suppressed because it is too large
+ 355 - 0
analysis/svd_ica_scenes_analysis.ipynb


File diff suppressed because it is too large
+ 315 - 0
analysis/svd_ipca_scenes_analysis.ipynb


File diff suppressed because it is too large
+ 16 - 16
analysis/svd_reconstruction_analysis.ipynb


File diff suppressed because it is too large
+ 316 - 0
analysis/svd_truncated_svd_scenes_analysis.ipynb


+ 1 - 1
modules/utils/config.py

@@ -35,7 +35,7 @@ cycle_scenes_indices            = ['E', 'I']
 normalization_choices           = ['svd', 'svdn', 'svdne']
 zones_indices                   = np.arange(16)
 
-metric_choices_labels           = ['lab', 'mscn', 'low_bits_2', 'low_bits_3', 'low_bits_4', 'low_bits_5', 'low_bits_6','low_bits_4_shifted_2', 'sub_blocks_stats', 'sub_blocks_area', 'sub_blocks_stats_reduced', 'sub_blocks_area_normed', 'mscn_var_4', 'mscn_var_16', 'mscn_var_64', 'mscn_var_16_max', 'mscn_var_64_max']
+metric_choices_labels           = ['lab', 'mscn', 'low_bits_2', 'low_bits_3', 'low_bits_4', 'low_bits_5', 'low_bits_6','low_bits_4_shifted_2', 'sub_blocks_stats', 'sub_blocks_area', 'sub_blocks_stats_reduced', 'sub_blocks_area_normed', 'mscn_var_4', 'mscn_var_16', 'mscn_var_64', 'mscn_var_16_max', 'mscn_var_64_max', 'ica_diff', 'svd_trunc_diff', 'ipca_diff']
 
 keras_epochs                    = 500
 keras_batch                     = 32
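
The three added labels correspond to the new data_type branches introduced in modules/utils/data.py below; a hypothetical call for one image block would be data = get_svd_data('ica_diff', block).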

+ 46 - 0
modules/utils/data.py

@@ -3,6 +3,9 @@ from modules.utils.config import *
 
 from PIL import Image
 from skimage import color
+from sklearn.decomposition import FastICA
+from sklearn.decomposition import IncrementalPCA
+from sklearn.decomposition import TruncatedSVD
 
 import numpy as np
 
@@ -202,6 +205,49 @@ def get_svd_data(data_type, block):
         indices = data.argsort()[-size:][::-1]
         data = data[indices]
 
+    if data_type == 'ica_diff':
+        current_image = metrics.get_LAB_L(block)
+
+        ica = FastICA(n_components=50)
+        ica.fit(current_image)
+
+        image_ica = ica.fit_transform(current_image)
+        image_restored = ica.inverse_transform(image_ica)
+
+        final_image = utils.normalize_2D_arr(image_restored)
+        final_image = np.array(final_image * 255, 'uint8')
+
+        sv_values = utils.normalize_arr(metrics.get_SVD_s(current_image))
+        ica_sv_values = utils.normalize_arr(metrics.get_SVD_s(final_image))
+
+        data = abs(np.array(sv_values) - np.array(ica_sv_values))
+
+    if data_type == 'svd_trunc_diff':
+
+        current_image = metrics.get_LAB_L(block)
+
+        svd = TruncatedSVD(n_components=30, n_iter=100, random_state=42)
+        transformed_image = svd.fit_transform(current_image)
+        restored_image = svd.inverse_transform(transformed_image)
+
+        reduced_image = (current_image - restored_image)
+
+        U, s, V = metrics.get_SVD(reduced_image)
+        data = s
+
+    if data_type == 'ipca_diff':
+
+        current_image = metrics.get_LAB_L(block)
+
+        transformer = IncrementalPCA(n_components=20, batch_size=25)
+        transformed_image = transformer.fit_transform(current_image)
+        restored_image = transformer.inverse_transform(transformed_image)
+
+        reduced_image = (current_image - restored_image)
+
+        U, s, V = metrics.get_SVD(reduced_image)
+        data = s
+
     return data
 
 def _get_mscn_variance(block, sub_block_size=(50, 50)):

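All three new branches in get_svd_data share one pattern: project the LAB L channel onto a low-dimensional basis (FastICA, TruncatedSVD or IncrementalPCA), reconstruct, and characterize what the reconstruction misses through singular values. A self-contained sketch of the svd_trunc_diff variant on synthetic data, assuming scikit-learn and NumPy; np.linalg.svd stands in for the repository's metrics.get_SVD:

import numpy as np
from sklearn.decomposition import TruncatedSVD

rng = np.random.RandomState(42)
image = rng.rand(200, 200) * 100  # stand-in for a 200x200 L-channel block

svd = TruncatedSVD(n_components=30, n_iter=100, random_state=42)
restored = svd.inverse_transform(svd.fit_transform(image))

residual = image - restored  # detail the low-rank model fails to capture
s = np.linalg.svd(residual, compute_uv=False)  # metric vector, as in data = s

ipca_diff follows the same residual pattern with IncrementalPCA(n_components=20, batch_size=25), while ica_diff instead takes the absolute difference between the normalized singular values of the original and the ICA-reconstructed image.
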
+ 1 - 1
runAll_maxwell_custom.sh

@@ -18,7 +18,7 @@ fi
 
 for size in {"4","8","16","26","32","40"}; do
 
-    for metric in {"lab","mscn","low_bits_2","low_bits_3","low_bits_4","low_bits_5","low_bits_6","low_bits_4_shifted_2"}; do
+    for metric in {"lab","mscn","low_bits_2","low_bits_3","low_bits_4","low_bits_5","low_bits_6","low_bits_4_shifted_2","ica_diff","svd_trunc_diff","ipca_diff"}; do
         bash generateAndTrain_maxwell_custom.sh ${size} ${metric}
     done
 done

+ 1 - 1
runAll_maxwell_custom_center.sh

@@ -18,7 +18,7 @@ fi
 
 for size in {"4","8","16","26","32","40"}; do
 
-    for metric in {"lab","mscn","low_bits_2","low_bits_3","low_bits_4","low_bits_5","low_bits_6","low_bits_4_shifted_2"}; do
+    for metric in {"lab","mscn","low_bits_2","low_bits_3","low_bits_4","low_bits_5","low_bits_6","low_bits_4_shifted_2","ica_diff","svd_trunc_diff","ipca_diff"}; do
         bash generateAndTrain_maxwell_custom_center.sh ${size} ${metric}
     done
 done

+ 1 - 1
runAll_maxwell_custom_split.sh

@@ -18,7 +18,7 @@ fi
 
 for size in {"4","8","16","26","32","40"}; do
 
-    for metric in {"lab","mscn","low_bits_2","low_bits_3","low_bits_4","low_bits_5","low_bits_6","low_bits_4_shifted_2"}; do
+    for metric in {"lab","mscn","low_bits_2","low_bits_3","low_bits_4","low_bits_5","low_bits_6","low_bits_4_shifted_2","ica_diff","svd_trunc_diff","ipca_diff"}; do
         bash generateAndTrain_maxwell_custom_split.sh ${size} ${metric}
     done
 done