# data_attributes.py
# main imports
import numpy as np
import sys

# image transform imports
from PIL import Image
from skimage import color
from sklearn.decomposition import FastICA
from sklearn.decomposition import IncrementalPCA
from sklearn.decomposition import TruncatedSVD
from numpy.linalg import svd as lin_svd
from scipy.signal import medfilt2d, wiener, cwt
import pywt
import cv2

from ipfml.processing import transform, compression, segmentation
from ipfml import utils

# modules and config imports
sys.path.insert(0, '') # trick to enable import of main folder module
import custom_config as cfg
from modules.utils import data as dt
  20. def get_svd_data(data_type, block):
  21. """
  22. Method which returns the data type expected
  23. """
  24. if 'filters_statistics' in data_type:
  25. img_width, img_height = 200, 200
  26. lab_img = transform.get_LAB_L(block)
  27. arr = np.array(lab_img)
  28. # compute all filters statistics
  29. def get_stats(arr, I_filter):
  30. e1 = np.abs(arr - I_filter)
  31. L = np.array(e1)
  32. mu0 = np.mean(L)
  33. A = L - mu0
  34. H = A * A
  35. E = np.sum(H) / (img_width * img_height)
  36. P = np.sqrt(E)
  37. return mu0, P
  38. stats = []
  39. kernel = np.ones((3,3),np.float32)/9
  40. stats.append(get_stats(arr, cv2.filter2D(arr,-1,kernel)))
  41. kernel = np.ones((5,5),np.float32)/25
  42. stats.append(get_stats(arr, cv2.filter2D(arr,-1,kernel)))
  43. stats.append(get_stats(arr, cv2.GaussianBlur(arr, (3, 3), 0.5)))
  44. stats.append(get_stats(arr, cv2.GaussianBlur(arr, (3, 3), 1)))
  45. stats.append(get_stats(arr, cv2.GaussianBlur(arr, (3, 3), 1.5)))
  46. stats.append(get_stats(arr, cv2.GaussianBlur(arr, (5, 5), 0.5)))
  47. stats.append(get_stats(arr, cv2.GaussianBlur(arr, (5, 5), 1)))
  48. stats.append(get_stats(arr, cv2.GaussianBlur(arr, (5, 5), 1.5)))
  49. stats.append(get_stats(arr, medfilt2d(arr, [3, 3])))
  50. stats.append(get_stats(arr, medfilt2d(arr, [5, 5])))
  51. stats.append(get_stats(arr, wiener(arr, [3, 3])))
  52. stats.append(get_stats(arr, wiener(arr, [5, 5])))
  53. wave = w2d(arr, 'db1', 2)
  54. stats.append(get_stats(arr, np.array(wave, 'float64')))
  55. data = []
  56. for stat in stats:
  57. data.append(stat[0])
  58. for stat in stats:
  59. data.append(stat[1])
  60. data = np.array(data)
  61. return data
  62. def w2d(arr, mode='haar', level=1):
  63. #convert to float
  64. imArray = arr
  65. np.divide(imArray, 255)
  66. # compute coefficients
  67. coeffs=pywt.wavedec2(imArray, mode, level=level)
  68. #Process Coefficients
  69. coeffs_H=list(coeffs)
  70. coeffs_H[0] *= 0
  71. # reconstruction
  72. imArray_H = pywt.waverec2(coeffs_H, mode)
  73. imArray_H *= 255
  74. imArray_H = np.uint8(imArray_H)
  75. return imArray_H
  76. def _get_mscn_variance(block, sub_block_size=(50, 50)):
  77. blocks = segmentation.divide_in_blocks(block, sub_block_size)
  78. data = []
  79. for block in blocks:
  80. mscn_coefficients = transform.get_mscn_coefficients(block)
  81. flat_coeff = mscn_coefficients.flatten()
  82. data.append(np.var(flat_coeff))
  83. return np.sort(data)