ncollide3d/transformation/hacd.rs

use crate::bounding_volume::{self, BoundingVolume, AABB};
use crate::math::Isometry;
use crate::num::{Bounded, Zero};
use crate::partitioning::{BVH, BVT};
use crate::procedural::{IndexBuffer, TriMesh};
use crate::query::algorithms::VoronoiSimplex;
use crate::query::{
    self, visitors::BoundingVolumeInterferencesCollector, Ray, RayCast, RayIntersection,
};
use crate::shape::SupportMap;
use crate::transformation;
use crate::utils;
use na::{self, Point3, RealField, Translation3, Vector2, Vector3};
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
use std::collections::{BinaryHeap, HashMap, HashSet};
use std::mem;

/// Approximate convex decomposition of a triangle mesh.
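///
/// The mesh is decimated in its dual-graph representation: adjacent triangles
/// are greedily merged as long as the concavity of the merged component stays
/// below `error` (measured on the mesh normalized to its AABB diagonal), or
/// until only `min_components` components remain.
///
/// Returns the convex hull of each resulting component together with the
/// indices of the original triangles it contains.
///
/// A usage sketch (the mesh construction is elided and `0.03` is an arbitrary
/// tolerance, not a value prescribed by this module); the input mesh must
/// carry vertex normals:
///
/// ```ignore
/// let (hulls, parts) = transformation::hacd(mesh, 0.03, 1);
/// // `hulls[i]` is the convex hull of the triangles listed in `parts[i]`.
/// ```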
pub fn hacd<N: RealField + Copy>(
    mesh: TriMesh<N>,
    error: N,
    min_components: usize,
) -> (Vec<TriMesh<N>>, Vec<Vec<usize>>) {
    assert!(
        mesh.normals.is_some(),
        "Vertex normals are required to compute the convex decomposition."
    );

    let mut mesh = mesh;

    let mut edges = BinaryHeap::new();
    let (center, diag) = normalize(&mut mesh);
    let (rays, raymap) = compute_rays(&mesh);
    let mut dual_graph = compute_dual_graph(&mesh, &raymap);
    let bvt = compute_ray_bvt(&rays[..]);

    /*
     * Initialize the binary heap.
     */
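    // One edge per pair of adjacent triangles of the dual graph; the `i < *n`
    // test inserts each pair only once. Costs are stored negated (`mcost`), so
    // the max-heap `BinaryHeap` pops the cheapest merge first.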
    for (i, v) in dual_graph.iter().enumerate() {
        for n in v.neighbors.as_ref().unwrap().iter() {
            if i < *n {
                let edge = DualGraphEdge::new(0, i, *n, &dual_graph[..], &mesh.coords[..], error);

                edges.push(edge);
            }
        }
    }

    /*
     * Decimation.
     */
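    // Greedy decimation with lazily re-evaluated costs: edges are first pushed
    // with an optimistic (non-exact) cost; when one reaches the top of the heap,
    // it is discarded if a previous collapse invalidated it, refined against the
    // next-best cost, and re-pushed if it is no longer the best candidate.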
    let mut curr_time = 0;

    while dual_graph.len() - curr_time > min_components {
        let to_add = match edges.pop() {
            None => break,
            Some(mut top) => {
                if !top.is_valid(&dual_graph[..]) {
                    continue; // This edge has been invalidated.
                }

                if !top.exact {
                    // The cost is just an upper bound.
                    let _max: N = N::max_value().unwrap();
                    let mut top_cost = -_max;

                    loop {
                        let remove = edges.peek().map_or(false, |ref e| {
                            if !e.is_valid(&dual_graph[..]) {
                                true
                            } else {
                                top_cost = e.mcost;
                                false
                            }
                        });

                        if remove {
                            let _ = edges.pop();
                        } else {
                            break;
                        }
                    }

                    // Compute a bounded decimation cost.
                    top.compute_decimation_cost(&dual_graph[..], &rays[..], &bvt, -top_cost, error);

                    if top.concavity > error {
                        continue;
                    }

                    if top.mcost < top_cost {
                        // We are no longer the greatest cost.
                        edges.push(top);
                        continue;
                    }
                }

                // Ensure the exact decimation cost (up to the maximum concavity) has been computed.
                top.compute_decimation_cost(
                    &dual_graph[..],
                    &rays[..],
                    &bvt,
                    N::max_value().unwrap(),
                    error,
                );

                if top.concavity > error {
                    continue;
                }

                curr_time = curr_time + 1;

                let v1 = top.v1;
                let v2 = top.v2;

                DualGraphVertex::hecol(top, &mut dual_graph[..]);

                assert!(dual_graph[v1].timestamp != usize::max_value());
                assert!(dual_graph[v2].timestamp != usize::max_value());
                dual_graph[v1].timestamp = curr_time;
                dual_graph[v2].timestamp = Bounded::max_value(); // Mark as invalid.

                v1
            }
        };

        for n in dual_graph[to_add].neighbors.as_ref().unwrap().iter() {
            let edge = DualGraphEdge::new(
                curr_time,
                to_add,
                *n,
                &dual_graph[..],
                &mesh.coords[..],
                error,
            );

            edges.push(edge);
        }
    }

    /*
     * Build the decomposition from the remaining vertices.
     */
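    // Every vertex that survived the decimation (timestamp still valid) holds
    // the convex hull of the triangles it absorbed; rescale those hulls back to
    // the original frame before returning them.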
    let mut result = Vec::with_capacity(dual_graph.len() - curr_time);
    let mut parts = Vec::with_capacity(dual_graph.len() - curr_time);

    for vertex in dual_graph.into_iter() {
        if vertex.timestamp != usize::max_value() {
            let mut chull = vertex.chull.unwrap();

            denormalize(&mut chull, &center, diag);
            result.push(chull);
            parts.push(vertex.parts.unwrap());
        }
    }

    (result, parts)
}

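// `normalize`/`denormalize` center the mesh at the origin and scale it by the
// inverse of its AABB diagonal (and back), so the concavity threshold is
// resolution-independent.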
fn normalize<N: RealField + Copy>(mesh: &mut TriMesh<N>) -> (Point3<N>, N) {
    let aabb = bounding_volume::local_point_cloud_aabb(&mesh.coords[..]);
    let diag = na::distance(&aabb.mins, &aabb.maxs);
    let center = aabb.center();

    mesh.translate_by(&Translation3::from(-center.coords));
    let _1: N = na::one();
    mesh.scale_by_scalar(_1 / diag);

    (center, diag)
}

fn denormalize<N: RealField + Copy>(mesh: &mut TriMesh<N>, center: &Point3<N>, diag: N) {
    mesh.scale_by_scalar(diag);
    mesh.translate_by(&Translation3::from(center.coords));
}

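// A vertex of the dual graph: one connected component of the decomposition. It
// tracks the original triangles it covers (`parts`), their convex hull, the
// rays sampled on them (`uancestors`), its boundary edges, area, AABB, and the
// concavity reached so far.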
struct DualGraphVertex<N: RealField + Copy> {
    // XXX: Loss of determinism because of the randomized HashSet.
    // Vertices adjacent to this one.
    neighbors: Option<HashSet<usize>>,
    // Faces from the original surface.
    ancestors: Option<Vec<VertexWithConcavity<N>>>,
    uancestors: Option<HashSet<usize>>,
    border: Option<HashSet<Vector2<usize>>>,
    chull: Option<TriMesh<N>>,
    parts: Option<Vec<usize>>,
    timestamp: usize,
    concavity: N,
    area: N,
    aabb: AABB<N>,
}

impl<N: RealField + Copy> DualGraphVertex<N> {
    pub fn new(
        ancestor: usize,
        mesh: &TriMesh<N>,
        raymap: &HashMap<(u32, u32), usize>,
    ) -> DualGraphVertex<N> {
        let (idx, ns) = match mesh.indices {
            IndexBuffer::Unified(ref idx) => (idx[ancestor].clone(), idx[ancestor].clone()),
            IndexBuffer::Split(ref idx) => {
                let t = idx[ancestor];
                (
                    Point3::new(t.x.x, t.y.x, t.z.x),
                    Point3::new(t.x.y, t.y.y, t.z.y),
                )
            }
        };

        let triangle = vec![
            mesh.coords[idx.x as usize],
            mesh.coords[idx.y as usize],
            mesh.coords[idx.z as usize],
        ];

        let area = utils::triangle_area(&triangle[0], &triangle[1], &triangle[2]);
        let aabb = bounding_volume::local_point_cloud_aabb(&triangle[..]);

        let chull = TriMesh::new(triangle, None, None, None);

        let r1 = raymap.get(&(idx.x, ns.x)).unwrap().clone();
        let r2 = raymap.get(&(idx.y, ns.y)).unwrap().clone();
        let r3 = raymap.get(&(idx.z, ns.z)).unwrap().clone();

        let ancestors = vec![
            VertexWithConcavity::new(r1, na::zero()),
            VertexWithConcavity::new(r2, na::zero()),
            VertexWithConcavity::new(r3, na::zero()),
        ];
        let mut uancestors = HashSet::new();
        let _ = uancestors.insert(r1);
        let _ = uancestors.insert(r2);
        let _ = uancestors.insert(r3);

        let mut border = HashSet::new();
        let _ = border.insert(edge(idx.x, idx.y));
        let _ = border.insert(edge(idx.y, idx.z));
        let _ = border.insert(edge(idx.z, idx.x));

        DualGraphVertex {
            neighbors: Some(HashSet::new()),
            ancestors: Some(ancestors),
            uancestors: Some(uancestors),
            parts: Some(vec![ancestor]),
            border: Some(border),
            timestamp: 0,
            chull: Some(chull),
            concavity: na::zero(),
            area: area,
            aabb: aabb,
        }
    }

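    // Half-edge collapse in the dual graph: merges the vertex `edge.v2` into
    // `edge.v1`, unioning neighbors, ancestors, parts, and boundary edges, and
    // replacing the two convex hulls by the hull of their combined vertices.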
    pub fn hecol(mut edge: DualGraphEdge<N>, graph: &mut [DualGraphVertex<N>]) {
        assert!(edge.exact);

        let valid = edge.v1;
        let other = edge.v2;

        graph[other].ancestors = None;

        let other_neighbors = graph[other].neighbors.take().unwrap();
        let other_parts = graph[other].parts.take().unwrap();

        /*
         * Merge neighbors.
         */
        for neighbor in other_neighbors.iter() {
            if *neighbor != valid {
                let ga = &mut graph[*neighbor];

                // Replace `other` by `valid`.
                let _ = ga.neighbors.as_mut().unwrap().remove(&other);
                let _ = ga.neighbors.as_mut().unwrap().insert(valid);
            }
        }

        /*
         * Merge ancestor costs.
         */
        let mut new_ancestors = Vec::new();
        let mut last_ancestor: usize = Bounded::max_value();

        loop {
            match edge.ancestors.pop() {
                None => break,
                Some(ancestor) => {
                    // Remove duplicates on the fly.
                    if last_ancestor != ancestor.id {
                        last_ancestor = ancestor.id;
                        new_ancestors.push(ancestor)
                    }
                }
            }
        }

        /*
         * Merge ancestor sets.
         */
        let mut other_uancestors = graph[other].uancestors.take().unwrap();
        let mut valid_uancestors = graph[valid].uancestors.take().unwrap();

        // Push the contents of the smaller set into the bigger one.
        if other_uancestors.len() > valid_uancestors.len() {
            mem::swap(&mut other_uancestors, &mut valid_uancestors);
        }

        {
            for i in other_uancestors.iter() {
                let _ = valid_uancestors.insert(*i);
            }
        }

        /*
         * Compute the convex hull.
         */
        let valid_chull = graph[valid].chull.take().unwrap();
        let other_chull = graph[other].chull.take().unwrap();

        let mut vtx1 = valid_chull.coords;
        let vtx2 = other_chull.coords;

        vtx1.extend(vtx2.into_iter());

        // FIXME: use a method to merge convex hulls instead of reconstructing one from scratch.
        let chull = transformation::convex_hull(&vtx1[..]);

        /*
         * Merge borders.
         */
        let valid_border = graph[valid].border.take().unwrap();
        let other_border = graph[other].border.take().unwrap();
        let new_border = valid_border
            .symmetric_difference(&other_border)
            .map(|e| *e)
            .collect();

        /*
         * Finalize.
         */
        let new_aabb = graph[valid].aabb.merged(&graph[other].aabb);
        let other_area = graph[other].area;
        let gvalid = &mut graph[valid];
        gvalid
            .parts
            .as_mut()
            .unwrap()
            .extend(other_parts.into_iter());
        gvalid.chull = Some(chull);
        gvalid.aabb = new_aabb;
        gvalid
            .neighbors
            .as_mut()
            .unwrap()
            .extend(other_neighbors.into_iter());
        let _ = gvalid.neighbors.as_mut().unwrap().remove(&other);
        let _ = gvalid.neighbors.as_mut().unwrap().remove(&valid);
        gvalid.ancestors = Some(new_ancestors);
        gvalid.area = gvalid.area + other_area;
        gvalid.concavity = edge.concavity;
        gvalid.uancestors = Some(valid_uancestors);
        gvalid.border = Some(new_border);
    }
}

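// A candidate merge between two dual-graph vertices. `mcost` is the negated
// decimation cost (concavity + shape penalty), so the max-heap orders edges
// from cheapest to most expensive; `exact` tells whether every ancestor ray has
// been cast or the cost is still an optimistic bound.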
struct DualGraphEdge<N> {
    v1: usize,
    v2: usize,
    mcost: N,
    exact: bool,
    timestamp: usize,
    ancestors: BinaryHeap<VertexWithConcavity<N>>,
    iv1: usize,
    iv2: usize,
    shape: N,
    concavity: N,
    iray_cast: bool,
}

impl<N: RealField + Copy> DualGraphEdge<N> {
    pub fn new(
        timestamp: usize,
        v1: usize,
        v2: usize,
        dual_graph: &[DualGraphVertex<N>],
        coords: &[Point3<N>],
        max_concavity: N,
    ) -> DualGraphEdge<N> {
        let mut v1 = v1;
        let mut v2 = v2;

        if v1 > v2 {
            mem::swap(&mut v1, &mut v2);
        }

        let border_1 = dual_graph[v1].border.as_ref().unwrap();
        let border_2 = dual_graph[v2].border.as_ref().unwrap();
        let mut perimeter = na::zero::<N>();

        for d in border_1
            .symmetric_difference(border_2)
            .map(|e| na::distance(&coords[e.x], &coords[e.y]))
        {
            perimeter = perimeter + d;
        }

        let area = dual_graph[v1].area + dual_graph[v2].area;

        // FIXME: refactor this.
        let aabb = dual_graph[v1].aabb.merged(&dual_graph[v2].aabb);
        let diagonal = na::distance(&aabb.mins, &aabb.maxs);
        let shape_cost;

        if area.is_zero() || diagonal.is_zero() {
            shape_cost = perimeter * perimeter;
        } else {
            shape_cost = diagonal * perimeter * perimeter / area;
        }

        let approx_concavity = dual_graph[v1].concavity.max(dual_graph[v2].concavity);

        DualGraphEdge {
            v1: v1,
            v2: v2,
            mcost: -(approx_concavity + max_concavity * shape_cost),
            exact: false,
            timestamp: timestamp,
            ancestors: BinaryHeap::with_capacity(0),
            iv1: 0,
            iv2: 0,
            shape: shape_cost,
            concavity: approx_concavity,
            iray_cast: false,
        }
    }

    pub fn is_valid(&self, dual_graph: &[DualGraphVertex<N>]) -> bool {
        self.timestamp >= dual_graph[self.v1].timestamp
            && self.timestamp >= dual_graph[self.v2].timestamp
    }

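    // Refines this edge's cost by measuring the concavity of the merged pair:
    // rays shot from the original surface vertices along their normals are cast
    // against the pair's convex hull, and the concavity is the largest distance
    // between a vertex and the hull along its ray. Evaluation stops early once
    // the concavity already exceeds the bound derived from `max_cost`.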
    pub fn compute_decimation_cost(
        &mut self,
        dual_graph: &[DualGraphVertex<N>],
        rays: &[Ray<N>],
        bvt: &BVT<usize, AABB<N>>,
        max_cost: N,
        max_concavity: N,
    ) {
        assert!(self.is_valid(dual_graph));

        let v1 = &dual_graph[self.v1];
        let v2 = &dual_graph[self.v2];

        /*
         * Estimate the concavity.
         */
        let chull1 = v1.chull.as_ref().unwrap();
        let chull2 = v2.chull.as_ref().unwrap();
        let chull = ConvexPair::new(&chull1.coords[..], &chull2.coords[..]);
        let _max: N = N::max_value().unwrap();

        let a1 = v1.ancestors.as_ref().unwrap();
        let a2 = v2.ancestors.as_ref().unwrap();

        let max_concavity = max_concavity.min(max_cost - max_concavity * self.shape);

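        // `cast_ray` measures how deep a sampled surface point sits below the
        // convex hull: it steps to a point just outside the hull along the ray
        // direction, casts back towards the surface point, and records the
        // distance from that point to the hull hit as its concavity
        // contribution.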
        fn cast_ray<'a, N: RealField + Copy>(
            chull: &ConvexPair<'a, N>,
            ray: &Ray<N>,
            id: usize,
            concavity: &mut N,
            ancestors: &mut BinaryHeap<VertexWithConcavity<N>>,
        ) {
            let sv = chull.local_support_point(&ray.dir);
            let distance = sv.coords.dot(&ray.dir);

            if !relative_eq!(distance, na::zero()) {
                let shift: N = na::convert(0.1f64);
                let outside_point = ray.origin + ray.dir * (distance + shift);

                match chull.toi_with_ray(
                    &Isometry::identity(),
                    &Ray::new(outside_point, -ray.dir),
                    N::max_value().unwrap(),
                    true,
                ) {
                    None => ancestors.push(VertexWithConcavity::new(id, na::zero())),
                    Some(toi) => {
                        let new_concavity = distance + shift - toi;
                        ancestors.push(VertexWithConcavity::new(id, new_concavity));
                        if *concavity < new_concavity {
                            *concavity = new_concavity
                        }
                    }
                }
            } else {
                ancestors.push(VertexWithConcavity::new(id, na::zero()));
            }
        }

        // Start with the rays we know.
        while (self.iv1 != a1.len() || self.iv2 != a2.len()) && self.concavity <= max_concavity {
            let id;

            if self.iv1 != a1.len()
                && (self.iv2 == a2.len() || a1[self.iv1].concavity > a2[self.iv2].concavity)
            {
                id = a1[self.iv1].id;
                self.iv1 = self.iv1 + 1;
            } else {
                id = a2[self.iv2].id;
                self.iv2 = self.iv2 + 1;
            }

            let ray = &rays[id].clone();

            if ray.dir.is_zero() {
                // The ray was set to zero if it was invalid.
                continue;
            }

            cast_ray(&chull, ray, id, &mut self.concavity, &mut self.ancestors);
        }

        // FIXME: (optimization) find a way to stop the cast if we exceed the max concavity.
        if !self.iray_cast && self.iv1 == a1.len() && self.iv2 == a2.len() {
            self.iray_cast = true;
            let mut internal_rays = Vec::new();

            {
                let aabb = v1.aabb.merged(&v2.aabb);
                let mut visitor =
                    BoundingVolumeInterferencesCollector::new(&aabb, &mut internal_rays);

                bvt.visit(&mut visitor);
            }

            let uancestors_v1 = v1.uancestors.as_ref().unwrap();
            let uancestors_v2 = v2.uancestors.as_ref().unwrap();

            for ray_id in internal_rays.iter() {
                let ray = &rays[*ray_id];

                if !uancestors_v1.contains(ray_id) && !uancestors_v2.contains(ray_id) {
                    // XXX: we could just remove the zero-dir rays from the ancestors list!
                    if ray.dir.is_zero() {
                        // The ray was set to zero if it was invalid.
                        continue;
                    }

                    // We determine if the point is inside of the convex hull or not.
                    // XXX: use a point-in-implicit test instead of a ray-cast!
                    match chull.toi_with_ray(
                        &Isometry::identity(),
                        ray,
                        N::max_value().unwrap(),
                        true,
                    ) {
                        None => continue,
                        Some(inter) => {
                            if inter.is_zero() {
                                // Ok, the point is inside; perform the real ray-cast.
                                cast_ray(
                                    &chull,
                                    ray,
                                    *ray_id,
                                    &mut self.concavity,
                                    &mut self.ancestors,
                                );
                            }
                        }
                    }
                }
            }
        }

        self.mcost = -(self.concavity + max_concavity * self.shape);
        self.exact = self.iv1 == a1.len() && self.iv2 == a2.len();
    }
}

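// `BinaryHeap` requires `Ord`, but `mcost` is a real-valued scalar without a
// total order, so the impls below treat incomparable costs (e.g. NaN) as equal.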
impl<N: PartialEq> PartialEq for DualGraphEdge<N> {
    #[inline]
    fn eq(&self, other: &DualGraphEdge<N>) -> bool {
        self.mcost.eq(&other.mcost)
    }
}

impl<N: PartialEq> Eq for DualGraphEdge<N> {}

impl<N: PartialOrd> PartialOrd for DualGraphEdge<N> {
    #[inline]
    fn partial_cmp(&self, other: &DualGraphEdge<N>) -> Option<Ordering> {
        self.mcost.partial_cmp(&other.mcost)
    }
}

impl<N: PartialOrd> Ord for DualGraphEdge<N> {
    #[inline]
    fn cmp(&self, other: &DualGraphEdge<N>) -> Ordering {
        if self.mcost < other.mcost {
            Ordering::Less
        } else if self.mcost > other.mcost {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    }
}

struct VertexWithConcavity<N> {
    id: usize,
    concavity: N,
}

impl<N> VertexWithConcavity<N> {
    #[inline]
    pub fn new(id: usize, concavity: N) -> VertexWithConcavity<N> {
        VertexWithConcavity {
            id: id,
            concavity: concavity,
        }
    }
}

impl<N: PartialEq> PartialEq for VertexWithConcavity<N> {
    #[inline]
    fn eq(&self, other: &VertexWithConcavity<N>) -> bool {
        self.concavity == other.concavity && self.id == other.id
    }
}

impl<N: PartialEq> Eq for VertexWithConcavity<N> {}

impl<N: PartialOrd> PartialOrd for VertexWithConcavity<N> {
    #[inline]
    fn partial_cmp(&self, other: &VertexWithConcavity<N>) -> Option<Ordering> {
        if self.concavity < other.concavity {
            Some(Ordering::Less)
        } else if self.concavity > other.concavity {
            Some(Ordering::Greater)
        } else {
            Some(self.id.cmp(&other.id))
        }
    }
}

impl<N: PartialOrd> Ord for VertexWithConcavity<N> {
    #[inline]
    fn cmp(&self, other: &VertexWithConcavity<N>) -> Ordering {
        if self.concavity < other.concavity {
            Ordering::Less
        } else if self.concavity > other.concavity {
            Ordering::Greater
        } else {
            self.id.cmp(&other.id)
        }
    }
}

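// Canonical (order-independent) key for an undirected edge between two vertex
// indices.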
#[inline]
fn edge(a: u32, b: u32) -> Vector2<usize> {
    if a > b {
        Vector2::new(b as usize, a as usize)
    } else {
        Vector2::new(a as usize, b as usize)
    }
}

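// Builds a BVT over degenerate (point) AABBs located at each ray origin; it is
// used to collect the rays falling inside the merged AABB of two dual vertices.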
fn compute_ray_bvt<N: RealField + Copy>(rays: &[Ray<N>]) -> BVT<usize, AABB<N>> {
    let aabbs = rays
        .iter()
        .enumerate()
        .map(|(i, r)| (i, AABB::new(r.origin, r.origin)))
        .collect();

    BVT::new_balanced(aabbs)
}

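// Builds one ray per distinct (vertex index, normal index) pair: the origin is
// the vertex position and the direction is its unit normal (zeroed when the
// normal cannot be normalized). `raymap` maps each index pair to the ray's
// position in the returned vector.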
fn compute_rays<N: RealField + Copy>(
    mesh: &TriMesh<N>,
) -> (Vec<Ray<N>>, HashMap<(u32, u32), usize>) {
    let mut rays = Vec::new();
    let mut raymap = HashMap::new();

    {
        let coords = &mesh.coords[..];
        let normals = &mesh.normals.as_ref().unwrap()[..];

        let mut add_ray = |coord: u32, normal: u32| {
            let key = (coord, normal);
            let existing = match raymap.entry(key) {
                Entry::Occupied(entry) => entry.into_mut(),
                Entry::Vacant(entry) => entry.insert(rays.len()),
            };

            if *existing == rays.len() {
                let coord = coords[coord as usize].clone();
                let mut normal = normals[normal as usize].clone();

                if normal.normalize_mut().is_zero() {
                    normal = na::zero();
                }

                rays.push(Ray::new(coord, normal));
            }
        };

        match mesh.indices {
            IndexBuffer::Unified(ref b) => {
                for t in b.iter() {
                    add_ray(t.x, t.x);
                    add_ray(t.y, t.y);
                    add_ray(t.z, t.z);
                }
            }
            IndexBuffer::Split(ref b) => {
                for t in b.iter() {
                    add_ray(t.x.x, t.x.y);
                    add_ray(t.y.x, t.y.y);
                    add_ray(t.z.x, t.z.y);
                }
            }
        }
    }

    (rays, raymap)
}

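// Builds the dual graph of the mesh: one vertex per triangle, with an edge
// between any two triangles sharing a primitive edge (tracked via `prim_edges`,
// which remembers the first triangle seen for each edge).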
fn compute_dual_graph<N: RealField + Copy>(
    mesh: &TriMesh<N>,
    raymap: &HashMap<(u32, u32), usize>,
) -> Vec<DualGraphVertex<N>> {
    // XXX: Loss of determinism because of the randomized HashMap.
    let mut prim_edges = HashMap::new();
    let mut dual_vertices: Vec<DualGraphVertex<N>> = (0..mesh.num_triangles())
        .map(|i| DualGraphVertex::new(i, mesh, raymap))
        .collect();

    {
        let mut add_triangle_edges = |i: usize, t: &Point3<u32>| {
            let es = [edge(t.x, t.y), edge(t.y, t.z), edge(t.z, t.x)];

            for e in es.iter() {
                let other = match prim_edges.entry(*e) {
                    Entry::Occupied(entry) => entry.into_mut(),
                    Entry::Vacant(entry) => entry.insert(i),
                };

                if *other != i {
                    // Register the adjacency.
                    let _ = dual_vertices[i].neighbors.as_mut().unwrap().insert(*other);
                    let _ = dual_vertices[*other].neighbors.as_mut().unwrap().insert(i);
                }
            }
        };

        match mesh.indices {
            IndexBuffer::Unified(ref b) => {
                for (i, t) in b.iter().enumerate() {
                    add_triangle_edges(i, t)
                }
            }
            IndexBuffer::Split(ref b) => {
                for (i, t) in b.iter().enumerate() {
                    let t_idx = Point3::new(t.x.x, t.y.x, t.z.x);
                    add_triangle_edges(i, &t_idx)
                }
            }
        }
    }

    dual_vertices
}

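// A support-mapped view of the convex hull of the union of two point clouds:
// its support point in a direction is simply the better of the two clouds'
// support points. This lets the concavity rays be cast against the merged hull
// without explicitly building it.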
struct ConvexPair<'a, N: 'a + RealField + Copy> {
    a: &'a [Point3<N>],
    b: &'a [Point3<N>],
}

impl<'a, N: RealField + Copy> ConvexPair<'a, N> {
    pub fn new(a: &'a [Point3<N>], b: &'a [Point3<N>]) -> ConvexPair<'a, N> {
        ConvexPair { a: a, b: b }
    }
}

impl<'a, N: RealField + Copy> SupportMap<N> for ConvexPair<'a, N> {
    #[inline]
    fn support_point(&self, _: &Isometry<N>, dir: &Vector3<N>) -> Point3<N> {
        self.local_support_point(dir)
    }

    #[inline]
    fn local_support_point(&self, dir: &Vector3<N>) -> Point3<N> {
        let sa = utils::point_cloud_support_point(dir, self.a);
        let sb = utils::point_cloud_support_point(dir, self.b);

        if sa.coords.dot(dir) > sb.coords.dot(dir) {
            sa
        } else {
            sb
        }
    }
}

impl<'a, N: RealField + Copy> RayCast<N> for ConvexPair<'a, N> {
    #[inline]
    fn toi_and_normal_with_ray(
        &self,
        id: &Isometry<N>,
        ray: &Ray<N>,
        max_toi: N,
        solid: bool,
    ) -> Option<RayIntersection<N>> {
        query::ray_intersection_with_support_map_with_params(
            id,
            self,
            &mut VoronoiSimplex::new(),
            ray,
            max_toi,
            solid,
        )
    }
}