1use approx::{AbsDiffEq, RelativeEq, UlpsEq};
2use num::One;
3use std::cmp::Ordering;
4use std::fmt;
5use std::hash;
6#[cfg(feature = "abomonation-serialize")]
7use std::io::{Result as IOResult, Write};
8
9#[cfg(feature = "serde-serialize-no-std")]
10use serde::{Deserialize, Deserializer, Serialize, Serializer};
11
12#[cfg(feature = "abomonation-serialize")]
13use abomonation::Abomonation;
14
15use simba::simd::SimdPartialOrd;
16
17use crate::base::allocator::Allocator;
18use crate::base::dimension::{DimName, DimNameAdd, DimNameSum, U1};
19use crate::base::iter::{MatrixIter, MatrixIterMut};
20use crate::base::{Const, DefaultAllocator, OVector, Scalar};
21use std::mem::MaybeUninit;
22
/// A point in an n-dimensional euclidean space.
///
/// Unlike a vector, a point represents a *location* rather than a displacement;
/// the distinction shows up in the operator impls elsewhere in the crate
/// (e.g. point - point = vector). The dimension `D` is a compile-time name
/// and the storage is provided by `DefaultAllocator`.
#[repr(C)]
#[derive(Clone)]
pub struct OPoint<T: Scalar, D: DimName>
where
    DefaultAllocator: Allocator<T, D>,
{
    // The coordinates of this point, i.e. its displacement from the origin.
    pub coords: OVector<T, D>,
}
51
52impl<T: Scalar + fmt::Debug, D: DimName> fmt::Debug for OPoint<T, D>
53where
54 DefaultAllocator: Allocator<T, D>,
55{
56 fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
57 self.coords.as_slice().fmt(formatter)
58 }
59}
60
61impl<T: Scalar + hash::Hash, D: DimName> hash::Hash for OPoint<T, D>
62where
63 DefaultAllocator: Allocator<T, D>,
64{
65 fn hash<H: hash::Hasher>(&self, state: &mut H) {
66 self.coords.hash(state)
67 }
68}
69
// `OPoint` is `Copy` whenever its coordinate vector is (i.e. for statically
// sized storage of `Copy` scalars); the explicit `OVector<T, D>: Copy` bound
// makes that requirement visible.
impl<T: Scalar + Copy, D: DimName> Copy for OPoint<T, D>
where
    DefaultAllocator: Allocator<T, D>,
    OVector<T, D>: Copy,
{
}
76
// SAFETY: `OPoint` is `#[repr(C)]` around a single `OVector` field, so it is
// safe to copy to the device whenever the coordinate vector itself is
// `DeviceCopy` (enforced by the bound below).
#[cfg(all(not(target_os = "cuda"), feature = "cuda"))]
unsafe impl<T: Scalar + cust::memory::DeviceCopy, D: DimName> cust::memory::DeviceCopy
    for OPoint<T, D>
where
    DefaultAllocator: Allocator<T, D>,
    OVector<T, D>: cust::memory::DeviceCopy,
{
}
85
// SAFETY: `OPoint` is `#[repr(C)]` with a single field, so the all-zeroes bit
// pattern is valid exactly when it is valid for the coordinate vector.
#[cfg(feature = "bytemuck")]
unsafe impl<T: Scalar, D: DimName> bytemuck::Zeroable for OPoint<T, D>
where
    OVector<T, D>: bytemuck::Zeroable,
    DefaultAllocator: Allocator<T, D>,
{
}
93
// SAFETY: `#[repr(C)]` single-field wrapper — `OPoint` is plain-old-data iff
// its coordinate vector is (and `T: Copy` rules out drop glue).
#[cfg(feature = "bytemuck")]
unsafe impl<T: Scalar, D: DimName> bytemuck::Pod for OPoint<T, D>
where
    T: Copy,
    OVector<T, D>: bytemuck::Pod,
    DefaultAllocator: Allocator<T, D>,
{
}
102
#[cfg(feature = "serde-serialize-no-std")]
impl<T: Scalar, D: DimName> Serialize for OPoint<T, D>
where
    DefaultAllocator: Allocator<T, D>,
    <DefaultAllocator as Allocator<T, D>>::Buffer: Serialize,
{
    /// Serializes the point transparently as its coordinate vector, so the
    /// on-wire representation is identical to that of an `OVector<T, D>`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        Serialize::serialize(&self.coords, serializer)
    }
}
116
#[cfg(feature = "serde-serialize-no-std")]
impl<'a, T: Scalar, D: DimName> Deserialize<'a> for OPoint<T, D>
where
    DefaultAllocator: Allocator<T, D>,
    <DefaultAllocator as Allocator<T, D>>::Buffer: Deserialize<'a>,
{
    /// Deserializes a coordinate vector and wraps it into a point — the exact
    /// inverse of the `Serialize` impl above.
    fn deserialize<Des>(deserializer: Des) -> Result<Self, Des::Error>
    where
        Des: Deserializer<'a>,
    {
        OVector::<T, D>::deserialize(deserializer).map(Self::from)
    }
}
132
// Abomonation support: a point entombs/exhumes exactly as its coordinate
// vector does, so the binary layout matches `OVector<T, D>`.
// NOTE(review): abomonation is an unsafe, layout-dependent serializer — the
// safety of these delegations rests entirely on the `OVector` impl.
#[cfg(feature = "abomonation-serialize")]
impl<T, D: DimName> Abomonation for OPoint<T, D>
where
    T: Scalar,
    OVector<T, D>: Abomonation,
    DefaultAllocator: Allocator<T, D>,
{
    unsafe fn entomb<W: Write>(&self, writer: &mut W) -> IOResult<()> {
        self.coords.entomb(writer)
    }

    // Number of extra bytes (beyond `size_of::<Self>()`) the entombed form needs.
    fn extent(&self) -> usize {
        self.coords.extent()
    }

    unsafe fn exhume<'a, 'b>(&'a mut self, bytes: &'b mut [u8]) -> Option<&'b mut [u8]> {
        self.coords.exhume(bytes)
    }
}
152
impl<T: Scalar, D: DimName> OPoint<T, D>
where
    DefaultAllocator: Allocator<T, D>,
{
    /// Returns a point whose coordinates are obtained by applying `f` to each
    /// coordinate of `self`, possibly changing the scalar type.
    #[inline]
    #[must_use]
    pub fn map<T2: Scalar, F: FnMut(T) -> T2>(&self, f: F) -> OPoint<T2, D>
    where
        DefaultAllocator: Allocator<T2, D>,
    {
        self.coords.map(f).into()
    }

    /// Applies `f` to each coordinate of `self` in place.
    #[inline]
    pub fn apply<F: FnMut(&mut T)>(&mut self, f: F) {
        self.coords.apply(f)
    }

    /// Converts this point into a vector in homogeneous coordinates,
    /// i.e. appends a `1` after its components: `[x, y, ..., 1]`.
    #[inline]
    #[must_use]
    pub fn to_homogeneous(&self) -> OVector<T, DimNameSum<D, U1>>
    where
        T: One,
        D: DimNameAdd<U1>,
        DefaultAllocator: Allocator<T, DimNameSum<D, U1>>,
    {
        let len = self.len();
        // Build the (D + 1)-vector without zero-initializing it first: copy the
        // point's coordinates into the leading D entries, then write the final 1.
        let mut res = crate::Matrix::uninit(DimNameSum::<D, U1>::name(), Const::<1>);
        res.generic_slice_mut((0, 0), self.coords.shape_generic())
            .zip_apply(&self.coords, |out, e| *out = MaybeUninit::new(e));
        res[(len, 0)] = MaybeUninit::new(T::one());

        // SAFETY: all D + 1 entries were written above.
        unsafe { res.assume_init() }
    }

    /// Creates a point from its coordinate vector.
    #[deprecated(note = "Use Point::from(vector) instead.")]
    #[inline]
    pub fn from_coordinates(coords: OVector<T, D>) -> Self {
        Self { coords }
    }

    /// The number of coordinates of this point (its dimension `D`).
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.coords.len()
    }

    /// Returns `true` if this point has no coordinates (dimension 0).
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    // Kept only for backward compatibility; coordinate storage is contiguous.
    #[inline]
    #[deprecated(note = "This methods is no longer significant and will always return 1.")]
    pub fn stride(&self) -> usize {
        self.coords.strides().0
    }

    /// Iterates over the coordinates of this point.
    #[inline]
    pub fn iter(
        &self,
    ) -> MatrixIter<'_, T, D, Const<1>, <DefaultAllocator as Allocator<T, D>>::Buffer> {
        self.coords.iter()
    }

    /// Gets a reference to the `i`-th coordinate without bound-checking.
    ///
    /// # Safety
    /// `i` must be in-bounds, i.e. `i < self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked(&self, i: usize) -> &T {
        self.coords.vget_unchecked(i)
    }

    /// Mutably iterates over the coordinates of this point.
    #[inline]
    pub fn iter_mut(
        &mut self,
    ) -> MatrixIterMut<'_, T, D, Const<1>, <DefaultAllocator as Allocator<T, D>>::Buffer> {
        self.coords.iter_mut()
    }

    /// Gets a mutable reference to the `i`-th coordinate without bound-checking.
    ///
    /// # Safety
    /// `i` must be in-bounds, i.e. `i < self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked_mut(&mut self, i: usize) -> &mut T {
        self.coords.vget_unchecked_mut(i)
    }

    /// Swaps the coordinates at positions `i1` and `i2` without bound-checking.
    ///
    /// # Safety
    /// Both `i1` and `i2` must be in-bounds, i.e. strictly less than `self.len()`.
    #[inline]
    pub unsafe fn swap_unchecked(&mut self, i1: usize, i2: usize) {
        self.coords.swap_unchecked((i1, 0), (i2, 0))
    }
}
341
342impl<T: Scalar + AbsDiffEq, D: DimName> AbsDiffEq for OPoint<T, D>
343where
344 T::Epsilon: Clone,
345 DefaultAllocator: Allocator<T, D>,
346{
347 type Epsilon = T::Epsilon;
348
349 #[inline]
350 fn default_epsilon() -> Self::Epsilon {
351 T::default_epsilon()
352 }
353
354 #[inline]
355 fn abs_diff_eq(&self, other: &Self, epsilon: Self::Epsilon) -> bool {
356 self.coords.abs_diff_eq(&other.coords, epsilon)
357 }
358}
359
360impl<T: Scalar + RelativeEq, D: DimName> RelativeEq for OPoint<T, D>
361where
362 T::Epsilon: Clone,
363 DefaultAllocator: Allocator<T, D>,
364{
365 #[inline]
366 fn default_max_relative() -> Self::Epsilon {
367 T::default_max_relative()
368 }
369
370 #[inline]
371 fn relative_eq(
372 &self,
373 other: &Self,
374 epsilon: Self::Epsilon,
375 max_relative: Self::Epsilon,
376 ) -> bool {
377 self.coords
378 .relative_eq(&other.coords, epsilon, max_relative)
379 }
380}
381
382impl<T: Scalar + UlpsEq, D: DimName> UlpsEq for OPoint<T, D>
383where
384 T::Epsilon: Clone,
385 DefaultAllocator: Allocator<T, D>,
386{
387 #[inline]
388 fn default_max_ulps() -> u32 {
389 T::default_max_ulps()
390 }
391
392 #[inline]
393 fn ulps_eq(&self, other: &Self, epsilon: Self::Epsilon, max_ulps: u32) -> bool {
394 self.coords.ulps_eq(&other.coords, epsilon, max_ulps)
395 }
396}
397
398impl<T: Scalar + Eq, D: DimName> Eq for OPoint<T, D> where DefaultAllocator: Allocator<T, D> {}
399
400impl<T: Scalar, D: DimName> PartialEq for OPoint<T, D>
401where
402 DefaultAllocator: Allocator<T, D>,
403{
404 #[inline]
405 fn eq(&self, right: &Self) -> bool {
406 self.coords == right.coords
407 }
408}
409
impl<T: Scalar + PartialOrd, D: DimName> PartialOrd for OPoint<T, D>
where
    DefaultAllocator: Allocator<T, D>,
{
    /// Delegates to the ordering of the coordinate vectors.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.coords.partial_cmp(&other.coords)
    }

    // NOTE(review): the overrides below use method-call syntax on `coords`, so
    // they may resolve to inherent (component-wise) comparison methods of the
    // matrix type rather than `PartialOrd`'s defaults derived from
    // `partial_cmp`. Do not rewrite them to UFCS without checking which method
    // actually resolves — behavior could silently change.
    #[inline]
    fn lt(&self, right: &Self) -> bool {
        self.coords.lt(&right.coords)
    }

    #[inline]
    fn le(&self, right: &Self) -> bool {
        self.coords.le(&right.coords)
    }

    #[inline]
    fn gt(&self, right: &Self) -> bool {
        self.coords.gt(&right.coords)
    }

    #[inline]
    fn ge(&self, right: &Self) -> bool {
        self.coords.ge(&right.coords)
    }
}
439
440impl<T: Scalar + SimdPartialOrd, D: DimName> OPoint<T, D>
444where
445 DefaultAllocator: Allocator<T, D>,
446{
447 #[inline]
449 #[must_use]
450 pub fn inf(&self, other: &Self) -> OPoint<T, D> {
451 self.coords.inf(&other.coords).into()
452 }
453
454 #[inline]
456 #[must_use]
457 pub fn sup(&self, other: &Self) -> OPoint<T, D> {
458 self.coords.sup(&other.coords).into()
459 }
460
461 #[inline]
463 #[must_use]
464 pub fn inf_sup(&self, other: &Self) -> (OPoint<T, D>, OPoint<T, D>) {
465 let (inf, sup) = self.coords.inf_sup(&other.coords);
466 (inf.into(), sup.into())
467 }
468}
469
470impl<T: Scalar + fmt::Display, D: DimName> fmt::Display for OPoint<T, D>
476where
477 DefaultAllocator: Allocator<T, D>,
478{
479 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
480 write!(f, "{{")?;
481
482 let mut it = self.coords.iter();
483
484 write!(f, "{}", *it.next().unwrap())?;
485
486 for comp in it {
487 write!(f, ", {}", *comp)?;
488 }
489
490 write!(f, "}}")
491 }
492}