#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

namespace Eigen {

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseVector<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};

} // end namespace internal
/** \class SparseVector
  *
  * \brief A sparse vector class: a sparse 1D array storing its nonzero
  * coefficients as a sorted sequence of (index, value) pairs.
  */
template<typename _Scalar, int _Options, typename _Index>
class SparseVector
  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    typedef SparseMatrixBase<SparseVector> SparseBase;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };
    internal::CompressedStorage<Scalar,Index> m_data;
    Index m_size;

    internal::CompressedStorage<Scalar,Index>& _data() { return m_data; }
    const internal::CompressedStorage<Scalar,Index>& _data() const { return m_data; }
    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
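
    // A minimal usage sketch (illustration only, not original documentation):
    // a SparseVector is always a single inner vector, so outerSize() is 1 and
    // the orientation only changes which of rows()/cols() reports m_size.
    //
    // \code
    // Eigen::SparseVector<double> col(5);                  // column vector
    // // col.rows() == 5, col.cols() == 1, col.innerSize() == 5
    // Eigen::SparseVector<double, Eigen::RowMajor> row(5); // row vector
    // // row.rows() == 1, row.cols() == 5, row.innerSize() == 5
    // \endcode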
    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }

    EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
    EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert((IsColVector ? col : row)==0);
      return coeff(IsColVector ? row : col);
    }
    inline Scalar coeff(Index i) const { return m_data.at(i); }
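
    // Read-access sketch (illustration only): coeff(i) binary-searches the
    // sorted index array and returns 0 when the coefficient is not stored.
    //
    // \code
    // Eigen::SparseVector<double> v(10);
    // v.insert(3) = 1.5;
    // double a = v.coeff(3);   // 1.5, found by binary search
    // double b = v.coeff(4);   // 0.0, index 4 is not stored
    // \endcode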
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert((IsColVector ? col : row)==0);
      return coeffRef(IsColVector ? row : col);
    }

    /** \returns a reference to the coefficient value at given index \a i.
      * If the coefficient does not exist yet, a sorted insertion is performed,
      * which can be costly when many nonzeros are stored above index \a i. */
    inline Scalar& coeffRef(Index i)
    {
      return m_data.atWithInsertion(i);
    }
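
    // Write-access sketch (illustration only): coeffRef inserts a zero entry
    // on demand, so it is always safe but potentially costly in a tight loop.
    //
    // \code
    // Eigen::SparseVector<double> v(10);
    // v.coeffRef(7) += 2.0;    // creates the entry (7, 0.0) first, then adds
    // \endcode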
    class InnerIterator;
    class ReverseInnerIterator;

    inline void setZero() { m_data.clear(); }
    /** \returns the number of nonzero coefficients */
    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }
    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }

    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }
    inline Scalar& insert(Index row, Index col)
    {
      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      eigen_assert(outer==0);
      return insert(inner);
    }

    Scalar& insert(Index i)
    {
      Index startId = 0;
      Index p = Index(m_data.size()) - 1;
      m_data.resize(p+2,1);

      // shift the tail of the arrays to keep the indices sorted
      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      m_data.index(p+1) = i;
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }
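
    // Sorted-insertion sketch (illustration only): insert(i) assumes index i
    // is not stored yet and shifts every entry with a larger index, so filling
    // in increasing index order (or via insertBack) is much cheaper.
    //
    // \code
    // Eigen::SparseVector<double> v(100);
    // v.insert(10) = 1.0;   // appended at the end, no shifting
    // v.insert(50) = 2.0;   // appended at the end, no shifting
    // v.insert(20) = 3.0;   // shifts the entry at index 50 one slot right
    // \endcode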
    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

    inline void finalize() {}
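
    // Low-level fill sketch (illustration only): the startVec/insertBack/
    // finalize protocol mirrors SparseMatrix; for a vector, finalize() is a
    // no-op and startVec() merely asserts that the outer index is 0.
    //
    // \code
    // Eigen::SparseVector<double> v(100);
    // v.reserve(3);              // avoid reallocations
    // v.startVec(0);             // the one and only outer vector
    // v.insertBack(2)  = 1.0;    // indices must be strictly increasing
    // v.insertBack(40) = 2.0;
    // v.insertBack(99) = 3.0;
    // v.finalize();
    // \endcode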
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }
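
    // Pruning sketch (illustration only): entries whose value is negligible
    // with respect to \c reference (within \c epsilon) are removed.
    //
    // \code
    // Eigen::SparseVector<double> v(10);
    // v.insert(1) = 1e-15;
    // v.insert(2) = 1.0;
    // v.prune(1.0);   // drops entries negligible w.r.t. 1.0, here index 1
    // // v.nonZeros() == 1
    // \endcode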
    void resize(Index rows, Index cols)
    {
      eigen_assert(rows==1 || cols==1);
      resize(IsColVector ? rows : cols);
    }

    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }
    void resizeNonZeros(Index size) { m_data.resize(size); }
    inline SparseVector() : m_size(0) { resize(0); }

    inline SparseVector(Index size) : m_size(0) { resize(size); }

    inline SparseVector(Index rows, Index cols) : m_size(0) { resize(rows,cols); }
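
    // Construction sketch (illustration only): a newly constructed vector is
    // empty, i.e. all of its coefficients are implicit zeros.
    //
    // \code
    // Eigen::SparseVector<double> a;        // size 0
    // Eigen::SparseVector<double> b(10);    // size 10, nonZeros() == 0
    // Eigen::SparseVector<double> c(10, 1); // 10x1, i.e. a column vector
    // \endcode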
    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      *this = other.derived();
    }
    inline SparseVector(const SparseVector& other)
      : SparseBase(other), m_size(0)
    {
      *this = other.derived();
    }
    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }
    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }
    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      if ( (bool(OtherDerived::IsVectorAtCompileTime) && int(RowsAtCompileTime)!=int(OtherDerived::RowsAtCompileTime))
          || ((!bool(OtherDerived::IsVectorAtCompileTime)) && ( bool(IsColVector) ? other.cols()>1 : other.rows()>1 )))
        return assign(other.transpose());
      else
        return assign(other);
    }
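
    // Assignment sketch (illustration only): an expression with the opposite
    // orientation is transposed on the fly, so a row-vector expression can be
    // stored into a column SparseVector and vice versa.
    //
    // \code
    // Eigen::SparseVector<double> col(5);
    // Eigen::SparseVector<double, Eigen::RowMajor> row(5);
    // row.coeffRef(2) = 1.0;
    // col = row.transpose();   // explicit transpose
    // col = row;               // also accepted: transposed automatically
    // \endcode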
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
    #endif
    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }
    /** \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    /** \deprecated use insertBack(Index,Index) */
    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    /** \deprecated use insertBack(Index) */
    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** \deprecated use insert(Index,Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    /** \deprecated use insert(Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index i)
    {
      return insert(i);
    }

    /** \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}
#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif
  protected:
    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseVector& assign(const SparseMatrixBase<OtherDerived>& _other)
    {
      const OtherDerived& other(_other.derived());
      const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
      if(needToTranspose)
      {
        // 'other' has the opposite storage order, so each of its 'size'
        // inner vectors holds at most one coefficient: copy them one by one
        Index size = other.size();
        Index nnz = other.nonZeros();
        resize(size);
        reserve(nnz);
        for(Index i=0; i<size; ++i)
        {
          typename OtherDerived::InnerIterator it(other, i);
          if(it)
            insert(i) = it.value();
        }
        return *this;
      }
      else
        return Base::operator=(other);
    }
};
template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline Scalar value() const { return m_data.value(m_id); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }

    inline Index index() const { return m_data.index(m_id); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_end;
};
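
// Iteration sketch (illustration only): InnerIterator visits only the stored
// coefficients, in increasing index order.
//
// \code
// Eigen::SparseVector<double> v(10);
// v.insert(3) = 1.5;
// v.insert(7) = 2.0;
// for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
//   std::cout << it.index() << " -> " << it.value() << "\n";
// \endcode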
template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {}

    inline ReverseInnerIterator& operator--() { m_id--; return *this; }

    inline Scalar value() const { return m_data.value(m_id-1); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }

    inline Index index() const { return m_data.index(m_id-1); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_start;
};
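
// Reverse-iteration sketch (illustration only): ReverseInnerIterator starts
// one past the last stored coefficient and is advanced with operator--.
//
// \code
// Eigen::SparseVector<double> v(10);
// v.insert(3) = 1.5;
// v.insert(7) = 2.0;
// for (Eigen::SparseVector<double>::ReverseInnerIterator it(v); it; --it)
//   std::cout << it.index() << " -> " << it.value() << "\n";  // 7 first, then 3
// \endcode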
} // end namespace Eigen

#endif // EIGEN_SPARSEVECTOR_H