1#[cfg(feature = "abomonation-serialize")]
2use std::io::{Result as IOResult, Write};
3
4#[cfg(all(feature = "alloc", not(feature = "std")))]
5use alloc::vec::Vec;
6
7use crate::base::allocator::Allocator;
8use crate::base::constraint::{SameNumberOfRows, ShapeConstraint};
9use crate::base::default_allocator::DefaultAllocator;
10use crate::base::dimension::{Dim, DimName, Dynamic, U1};
11use crate::base::storage::{IsContiguous, Owned, RawStorage, RawStorageMut, ReshapableStorage};
12use crate::base::{Scalar, Vector};
13
14#[cfg(feature = "serde-serialize-no-std")]
15use serde::{
16 de::{Deserialize, Deserializer, Error},
17 ser::{Serialize, Serializer},
18};
19
20use crate::Storage;
21#[cfg(feature = "abomonation-serialize")]
22use abomonation::Abomonation;
23use std::mem::MaybeUninit;
24
/// A `Vec`-backed dense matrix data storage with (possibly) dynamic dimensions.
///
/// Elements are laid out contiguously in column-major order: the row stride is
/// one element and the column stride is `nrows` (see the `RawStorage` impls'
/// `strides` below).
#[repr(C)]
#[derive(Eq, Debug, Clone, PartialEq)]
pub struct VecStorage<T, R: Dim, C: Dim> {
    // Invariant: data.len() == nrows.value() * ncols.value()
    // (checked by `new` and by the `Deserialize` impl).
    data: Vec<T>,
    nrows: R,
    ncols: C,
}
38
// Gated on `serde-serialize-no-std` to match the `serde` imports at the top of
// this file: with the previous `serde-serialize` gate, enabling that feature
// without `serde-serialize-no-std` referenced `Serialize`/`Serializer` that
// were never imported, and enabling only the no-std feature left the imports
// unused.
#[cfg(feature = "serde-serialize-no-std")]
impl<T, R: Dim, C: Dim> Serialize for VecStorage<T, R, C>
where
    T: Serialize,
    R: Serialize,
    C: Serialize,
{
    /// Serializes the storage as a `(data, nrows, ncols)` tuple.
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: Serializer,
    {
        (&self.data, &self.nrows, &self.ncols).serialize(serializer)
    }
}
53
// Gated on `serde-serialize-no-std` to match the `serde` imports at the top of
// this file (the `Deserializer`/`Error` names used below are only brought into
// scope under that feature).
#[cfg(feature = "serde-serialize-no-std")]
impl<'a, T, R: Dim, C: Dim> Deserialize<'a> for VecStorage<T, R, C>
where
    T: Deserialize<'a>,
    R: Deserialize<'a>,
    C: Deserialize<'a>,
{
    /// Deserializes a `(data, nrows, ncols)` tuple, rejecting inputs whose
    /// buffer length does not equal `nrows * ncols` so the storage invariant
    /// is re-established on the deserialization path as well.
    fn deserialize<Des>(deserializer: Des) -> Result<Self, Des::Error>
    where
        Des: Deserializer<'a>,
    {
        let (data, nrows, ncols): (Vec<T>, R, C) = Deserialize::deserialize(deserializer)?;

        if nrows.value() * ncols.value() != data.len() {
            return Err(Des::Error::custom(format!(
                "Expected {} components, found {}",
                nrows.value() * ncols.value(),
                data.len()
            )));
        }

        Ok(Self { data, nrows, ncols })
    }
}
80
/// Deprecated alias kept for backward compatibility; use [`VecStorage`] instead.
#[deprecated(note = "renamed to `VecStorage`")]
pub type MatrixVec<T, R, C> = VecStorage<T, R, C>;
84
impl<T, R: Dim, C: Dim> VecStorage<T, R, C> {
    /// Creates a new storage from its buffer and dimensions.
    ///
    /// # Panics
    /// Panics if `data.len() != nrows.value() * ncols.value()`.
    #[inline]
    pub fn new(nrows: R, ncols: C, data: Vec<T>) -> Self {
        assert!(
            nrows.value() * ncols.value() == data.len(),
            "Data storage buffer dimension mismatch."
        );
        Self { data, nrows, ncols }
    }

    /// The underlying element buffer (column-major order).
    #[inline]
    #[must_use]
    pub fn as_vec(&self) -> &Vec<T> {
        &self.data
    }

    /// Mutable access to the underlying element buffer.
    ///
    /// # Safety
    /// The caller must not change the vector's length: the storage invariant
    /// `data.len() == nrows * ncols` (enforced by `new`) would otherwise be
    /// broken.
    #[inline]
    pub unsafe fn as_vec_mut(&mut self) -> &mut Vec<T> {
        &mut self.data
    }

    /// Consumes the storage and resizes its buffer to `sz` elements, returning
    /// the buffer as a `Vec<MaybeUninit<T>>`.
    ///
    /// Elements `0..min(len, sz)` of the result are initialized with the
    /// original values; elements `min(len, sz)..sz` are uninitialized.
    ///
    /// # Safety
    /// When shrinking (`sz < len`), the truncated elements are *not* dropped
    /// (`set_len` runs no destructors); disposing of them is the caller's
    /// responsibility. The caller must also never read the uninitialized tail
    /// of the returned vector before initializing it.
    #[inline]
    pub unsafe fn resize(mut self, sz: usize) -> Vec<MaybeUninit<T>> {
        let len = self.len();

        let new_data = if sz < len {
            // Shrink: forget the tail elements (no drops), then release any
            // excess capacity before re-wrapping the buffer.
            self.data.set_len(sz);
            self.data.shrink_to_fit();

            // Reinterpret the same allocation as a vector of MaybeUninit<T>.
            Vec::from_raw_parts(
                self.data.as_mut_ptr() as *mut MaybeUninit<T>,
                self.data.len(),
                self.data.capacity(),
            )
        } else {
            // Grow: make sure the allocation can hold `sz` elements...
            self.data.reserve_exact(sz - len);

            // ...then reinterpret it as MaybeUninit<T> with the current length,
            let mut new_data = Vec::from_raw_parts(
                self.data.as_mut_ptr() as *mut MaybeUninit<T>,
                self.data.len(),
                self.data.capacity(),
            );

            // ...and extend the length into the (uninitialized) reserved space,
            // which is valid because MaybeUninit<T> needs no initialization.
            new_data.set_len(sz);
            new_data
        };

        // `new_data` now owns the allocation; forget `self` so its `Vec` does
        // not free the same buffer (avoiding a double free).
        std::mem::forget(self);
        new_data
    }

    /// Total number of stored elements (`nrows * ncols`).
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// `true` if the storage holds no element.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The elements as a contiguous (column-major) slice.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        &self.data[..]
    }

    /// The elements as a contiguous (column-major) mutable slice.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        &mut self.data[..]
    }
}
187
188impl<T, R: Dim, C: Dim> From<VecStorage<T, R, C>> for Vec<T> {
189 fn from(vec: VecStorage<T, R, C>) -> Self {
190 vec.data
191 }
192}
193
// SAFETY: all accessors below describe the same single contiguous `Vec`
// buffer, whose length matches `nrows * ncols` (invariant enforced by `new`).
unsafe impl<T, C: Dim> RawStorage<T, Dynamic, C> for VecStorage<T, Dynamic, C> {
    type RStride = U1;
    type CStride = Dynamic;

    #[inline]
    fn ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (Dynamic, C) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        // Row stride 1, column stride `nrows`: column-major layout.
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        // A `Vec` buffer is always one contiguous allocation.
        true
    }

    #[inline]
    unsafe fn as_slice_unchecked(&self) -> &[T] {
        &self.data
    }
}
229
// SAFETY: delegates ownership semantics to the storage itself — this type is
// the default allocator's owned buffer for (Dynamic, C) matrices.
unsafe impl<T: Scalar, C: Dim> Storage<T, Dynamic, C> for VecStorage<T, Dynamic, C>
where
    DefaultAllocator: Allocator<T, Dynamic, C, Buffer = Self>,
{
    /// Already owned: returns `self` without copying.
    #[inline]
    fn into_owned(self) -> Owned<T, Dynamic, C>
    where
        DefaultAllocator: Allocator<T, Dynamic, C>,
    {
        self
    }

    /// Clones the whole buffer into a new owned storage.
    #[inline]
    fn clone_owned(&self) -> Owned<T, Dynamic, C>
    where
        DefaultAllocator: Allocator<T, Dynamic, C>,
    {
        self.clone()
    }
}
250
// SAFETY: mirror of the (Dynamic, C) impl above for a statically-known row
// count — same contiguous column-major `Vec` buffer.
unsafe impl<T, R: DimName> RawStorage<T, R, Dynamic> for VecStorage<T, R, Dynamic> {
    type RStride = U1;
    type CStride = R;

    #[inline]
    fn ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (R, Dynamic) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        // Row stride 1, column stride `nrows`: column-major layout.
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        // A `Vec` buffer is always one contiguous allocation.
        true
    }

    #[inline]
    unsafe fn as_slice_unchecked(&self) -> &[T] {
        &self.data
    }
}
280
// SAFETY: delegates ownership semantics to the storage itself — this type is
// the default allocator's owned buffer for (R, Dynamic) matrices.
unsafe impl<T: Scalar, R: DimName> Storage<T, R, Dynamic> for VecStorage<T, R, Dynamic>
where
    DefaultAllocator: Allocator<T, R, Dynamic, Buffer = Self>,
{
    /// Already owned: returns `self` without copying.
    #[inline]
    fn into_owned(self) -> Owned<T, R, Dynamic>
    where
        DefaultAllocator: Allocator<T, R, Dynamic>,
    {
        self
    }

    /// Clones the whole buffer into a new owned storage.
    #[inline]
    fn clone_owned(&self) -> Owned<T, R, Dynamic>
    where
        DefaultAllocator: Allocator<T, R, Dynamic>,
    {
        self.clone()
    }
}
301
// SAFETY: mutable counterparts of the `RawStorage` accessors; both expose the
// same contiguous `Vec` buffer.
unsafe impl<T, C: Dim> RawStorageMut<T, Dynamic, C> for VecStorage<T, Dynamic, C> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut T {
        self.data.as_mut_ptr()
    }

    #[inline]
    unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T] {
        &mut self.data[..]
    }
}
318
// SAFETY: `VecStorage` keeps every element in one contiguous `Vec` allocation
// (see `as_slice`), which is exactly what `IsContiguous` asserts.
unsafe impl<T, R: Dim, C: Dim> IsContiguous for VecStorage<T, R, C> {}
320
321impl<T, C1, C2> ReshapableStorage<T, Dynamic, C1, Dynamic, C2> for VecStorage<T, Dynamic, C1>
322where
323 T: Scalar,
324 C1: Dim,
325 C2: Dim,
326{
327 type Output = VecStorage<T, Dynamic, C2>;
328
329 fn reshape_generic(self, nrows: Dynamic, ncols: C2) -> Self::Output {
330 assert_eq!(nrows.value() * ncols.value(), self.data.len());
331 VecStorage {
332 data: self.data,
333 nrows,
334 ncols,
335 }
336 }
337}
338
339impl<T, C1, R2> ReshapableStorage<T, Dynamic, C1, R2, Dynamic> for VecStorage<T, Dynamic, C1>
340where
341 T: Scalar,
342 C1: Dim,
343 R2: DimName,
344{
345 type Output = VecStorage<T, R2, Dynamic>;
346
347 fn reshape_generic(self, nrows: R2, ncols: Dynamic) -> Self::Output {
348 assert_eq!(nrows.value() * ncols.value(), self.data.len());
349 VecStorage {
350 data: self.data,
351 nrows,
352 ncols,
353 }
354 }
355}
356
// SAFETY: mutable counterparts of the `RawStorage` accessors; both expose the
// same contiguous `Vec` buffer.
unsafe impl<T, R: DimName> RawStorageMut<T, R, Dynamic> for VecStorage<T, R, Dynamic> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut T {
        self.data.as_mut_ptr()
    }

    #[inline]
    unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T] {
        &mut self.data[..]
    }
}
368
369impl<T, R1, C2> ReshapableStorage<T, R1, Dynamic, Dynamic, C2> for VecStorage<T, R1, Dynamic>
370where
371 T: Scalar,
372 R1: DimName,
373 C2: Dim,
374{
375 type Output = VecStorage<T, Dynamic, C2>;
376
377 fn reshape_generic(self, nrows: Dynamic, ncols: C2) -> Self::Output {
378 assert_eq!(nrows.value() * ncols.value(), self.data.len());
379 VecStorage {
380 data: self.data,
381 nrows,
382 ncols,
383 }
384 }
385}
386
387impl<T, R1, R2> ReshapableStorage<T, R1, Dynamic, R2, Dynamic> for VecStorage<T, R1, Dynamic>
388where
389 T: Scalar,
390 R1: DimName,
391 R2: DimName,
392{
393 type Output = VecStorage<T, R2, Dynamic>;
394
395 fn reshape_generic(self, nrows: R2, ncols: Dynamic) -> Self::Output {
396 assert_eq!(nrows.value() * ncols.value(), self.data.len());
397 VecStorage {
398 data: self.data,
399 nrows,
400 ncols,
401 }
402 }
403}
404
#[cfg(feature = "abomonation-serialize")]
impl<T: Abomonation, R: Dim, C: Dim> Abomonation for VecStorage<T, R, C> {
    // Only the heap-allocated element buffer is entombed/exhumed; `nrows` and
    // `ncols` travel inline with the struct bytes.
    // NOTE(review): this assumes `R`/`C` own no heap data — verify for any
    // non-trivial `Dim` type.
    unsafe fn entomb<W: Write>(&self, writer: &mut W) -> IOResult<()> {
        self.data.entomb(writer)
    }

    unsafe fn exhume<'a, 'b>(&'a mut self, bytes: &'b mut [u8]) -> Option<&'b mut [u8]> {
        self.data.exhume(bytes)
    }

    fn extent(&self) -> usize {
        self.data.extent()
    }
}
419
420impl<T, R: Dim> Extend<T> for VecStorage<T, R, Dynamic> {
421 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
429 self.data.extend(iter);
430 self.ncols = Dynamic::new(self.data.len() / self.nrows.value());
431 assert!(self.data.len() % self.nrows.value() == 0,
432 "The number of elements produced by the given iterator was not a multiple of the number of rows.");
433 }
434}
435
436impl<'a, T: 'a + Copy, R: Dim> Extend<&'a T> for VecStorage<T, R, Dynamic> {
437 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
445 self.extend(iter.into_iter().copied())
446 }
447}
448
449impl<T, R, RV, SV> Extend<Vector<T, RV, SV>> for VecStorage<T, R, Dynamic>
450where
451 T: Scalar,
452 R: Dim,
453 RV: Dim,
454 SV: RawStorage<T, RV>,
455 ShapeConstraint: SameNumberOfRows<R, RV>,
456{
457 fn extend<I: IntoIterator<Item = Vector<T, RV, SV>>>(&mut self, iter: I) {
465 let nrows = self.nrows.value();
466 let iter = iter.into_iter();
467 let (lower, _upper) = iter.size_hint();
468 self.data.reserve(nrows * lower);
469 for vector in iter {
470 assert_eq!(nrows, vector.shape().0);
471 self.data.extend(vector.iter().cloned());
472 }
473 self.ncols = Dynamic::new(self.data.len() / nrows);
474 }
475}
476
477impl<T> Extend<T> for VecStorage<T, Dynamic, U1> {
478 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
481 self.data.extend(iter);
482 self.nrows = Dynamic::new(self.data.len());
483 }
484}