// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {
/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */
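
// A minimal usage sketch (illustrative only; assumes the typical
// Scalar=double, StorageIndex=int instantiation used by SparseMatrix):
//
//   internal::CompressedStorage<double,int> data;
//   data.append(0.5, 3);    // stores value 0.5 at index 3
//   data.append(2.0, 7);    // append() does not sort; callers keep indices increasing
//   double v = data.at(7);  // == 2.0
//   double w = data.at(5);  // index 5 absent: returns the default Scalar(0)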
template<typename _Scalar,typename _StorageIndex>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _StorageIndex StorageIndex;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    explicit CompressedStorage(Index size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      if(other.size()>0)
      {
        internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
        internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      }
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }
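
    // Note: \a size is the *additional* capacity to reserve on top of the
    // current size, not an absolute capacity as in std::vector::reserve.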
    void reserve(Index size)
    {
      Index newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }
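
    // Resizes to \a size elements; \a reserveSizeFactor reserves proportional
    // headroom (e.g. a factor of 1 doubles the request), capped so that the
    // result still fits in StorageIndex.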
    void resize(Index size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
      {
        Index realloc_size = (std::min<Index>)(NumTraits<StorageIndex>::highest(), size + Index(reserveSizeFactor*double(size)));
        if(realloc_size<size)
          internal::throw_std_bad_alloc();
        reallocate(realloc_size);
      }
      m_size = size;
    }
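
    // Appends one (value,index) pair; the reserve factor of 1 passed to
    // resize() doubles the capacity when full, giving amortized O(1) appends.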
    void append(const Scalar& v, Index i)
    {
      Index id = m_size;
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = internal::convert_index<StorageIndex>(i);
    }

    inline Index size() const { return m_size; }
    inline Index allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    const Scalar* valuePtr() const { return m_values; }
    Scalar* valuePtr() { return m_values; }
    const StorageIndex* indexPtr() const { return m_indices; }
    StorageIndex* indexPtr() { return m_indices; }

    inline Scalar& value(Index i) { eigen_internal_assert(m_values!=0); return m_values[i]; }
    inline const Scalar& value(Index i) const { eigen_internal_assert(m_values!=0); return m_values[i]; }

    inline StorageIndex& index(Index i) { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
    inline const StorageIndex& index(Index i) const { eigen_internal_assert(m_indices!=0); return m_indices[i]; }

    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(Index start, Index end, Index key) const
    {
      while(end>start)
      {
        Index mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return start;
    }
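
    // Worked example for searchLowerIndex(): with indices {2,5,9},
    // searchLowerIndex(5) returns 1 and searchLowerIndex(6) returns 2,
    // i.e. the insertion point that keeps the keys sorted.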

    /** \returns the stored value at index \a key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(Index start, Index end, Index key, const Scalar &defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (key==m_indices[end-1]) // the range is non-empty here
        return m_values[end-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \a key
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys are sorted. */
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      Index id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        if (m_allocatedSize<m_size+1)
        {
          m_allocatedSize = 2*(m_size+1);
          internal::scoped_array<Scalar> newValues(m_allocatedSize);
          internal::scoped_array<StorageIndex> newIndices(m_allocatedSize);

          // copy first chunk
          internal::smart_copy(m_values,  m_values +id, newValues.ptr());
          internal::smart_copy(m_indices, m_indices+id, newIndices.ptr());

          // copy the rest
          if(m_size>id)
          {
            internal::smart_copy(m_values +id,  m_values +m_size, newValues.ptr() +id+1);
            internal::smart_copy(m_indices+id,  m_indices+m_size, newIndices.ptr()+id+1);
          }
          std::swap(m_values,newValues.ptr());
          std::swap(m_indices,newIndices.ptr());
        }
        else if(m_size>id)
        {
          internal::smart_memmove(m_values +id, m_values +m_size, m_values +id+1);
          internal::smart_memmove(m_indices+id, m_indices+m_size, m_indices+id+1);
        }
        m_size++;
        m_indices[id] = internal::convert_index<StorageIndex>(key);
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
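
    // Typical accumulation pattern for atWithInsertion() (illustrative only):
    //   data.atWithInsertion(4) += 1.0;  // inserts a zero at index 4 if absent,
    //                                    // then accumulates into it in place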

    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      Index k = 0;
      Index n = size();
      for (Index i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }
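
    // Example: prune(Scalar(1)) drops every stored entry that is negligible
    // relative to 1 (in the sense of internal::isMuchSmallerThan) and
    // compacts the surviving entries in place.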

  protected:

    inline void reallocate(Index size)
    {
      #ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
        EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
      #endif
      eigen_internal_assert(size!=m_allocatedSize);
      internal::scoped_array<Scalar> newValues(size);
      internal::scoped_array<StorageIndex> newIndices(size);
      Index copySize = (std::min)(size, m_size);
      if (copySize>0) {
        internal::smart_copy(m_values, m_values+copySize, newValues.ptr());
        internal::smart_copy(m_indices, m_indices+copySize, newIndices.ptr());
      }
      std::swap(m_values,newValues.ptr());
      std::swap(m_indices,newIndices.ptr());
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    StorageIndex* m_indices;
    Index m_size;
    Index m_allocatedSize;
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H