1use itertools::Itertools;
4
5use super::analysis::Sizedness;
6use super::annotations::Annotations;
7use super::context::{BindgenContext, FunctionId, ItemId, TypeId, VarId};
8use super::dot::DotAttributes;
9use super::item::{IsOpaque, Item};
10use super::layout::Layout;
11use super::template::TemplateParameters;
12use super::traversal::{EdgeKind, Trace, Tracer};
13use super::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
14use crate::clang;
15use crate::codegen::struct_layout::{align_to, bytes_from_bits_pow2};
16use crate::ir::derive::CanDeriveCopy;
17use crate::parse::ParseError;
18use crate::HashMap;
19use crate::NonCopyUnionStyle;
20use std::cmp;
21use std::io;
22use std::mem;
23
/// Whether a compound type is a `struct` or a `union`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(crate) enum CompKind {
    /// A `struct`.
    Struct,
    /// A `union`.
    Union,
}
32
/// The kind of C++ method that a `Method` represents.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(crate) enum MethodKind {
    /// A constructor.
    Constructor,
    /// A (non-virtual) destructor.
    Destructor,
    /// A virtual destructor.
    VirtualDestructor {
        /// Whether it is pure virtual.
        pure_virtual: bool,
    },
    /// A static method.
    Static,
    /// A normal (non-virtual, non-static) method.
    Normal,
    /// A virtual method.
    Virtual {
        /// Whether it is pure virtual.
        pure_virtual: bool,
    },
}
56
57impl MethodKind {
58 pub(crate) fn is_destructor(&self) -> bool {
60 matches!(
61 *self,
62 MethodKind::Destructor | MethodKind::VirtualDestructor { .. }
63 )
64 }
65
66 pub(crate) fn is_pure_virtual(&self) -> bool {
68 match *self {
69 MethodKind::Virtual { pure_virtual } |
70 MethodKind::VirtualDestructor { pure_virtual } => pure_virtual,
71 _ => false,
72 }
73 }
74}
75
/// A compound type's method: its kind, its signature, and its constness.
#[derive(Debug)]
pub(crate) struct Method {
    /// What kind of method this is.
    kind: MethodKind,
    /// The ID of the function item holding this method's signature.
    signature: FunctionId,
    /// Whether this method is `const`-qualified.
    is_const: bool,
}
87
88impl Method {
89 pub(crate) fn new(
91 kind: MethodKind,
92 signature: FunctionId,
93 is_const: bool,
94 ) -> Self {
95 Method {
96 kind,
97 signature,
98 is_const,
99 }
100 }
101
102 pub(crate) fn kind(&self) -> MethodKind {
104 self.kind
105 }
106
107 pub(crate) fn is_constructor(&self) -> bool {
109 self.kind == MethodKind::Constructor
110 }
111
112 pub(crate) fn is_virtual(&self) -> bool {
114 matches!(
115 self.kind,
116 MethodKind::Virtual { .. } | MethodKind::VirtualDestructor { .. }
117 )
118 }
119
120 pub(crate) fn is_static(&self) -> bool {
122 self.kind == MethodKind::Static
123 }
124
125 pub(crate) fn signature(&self) -> FunctionId {
127 self.signature
128 }
129
130 pub(crate) fn is_const(&self) -> bool {
132 self.is_const
133 }
134}
135
/// Common operations over anything field-like: raw fields, processed data
/// members, and bitfields.
pub(crate) trait FieldMethods {
    /// The name of this field, or `None` if it is anonymous.
    fn name(&self) -> Option<&str>;

    /// The type of this field.
    fn ty(&self) -> TypeId;

    /// The documentation comment attached to this field, if any.
    fn comment(&self) -> Option<&str>;

    /// If this is a bitfield, its width in bits.
    fn bitfield_width(&self) -> Option<u32>;

    /// Whether this field is publicly accessible.
    fn is_public(&self) -> bool;

    /// The annotations attached to this field.
    fn annotations(&self) -> &Annotations;

    /// The offset of this field within its parent, if known (as reported
    /// by clang).
    fn offset(&self) -> Option<usize>;
}
159
/// A contiguous run of bitfields that share a single storage "allocation
/// unit", as computed by `bitfields_to_allocation_units`.
#[derive(Debug)]
pub(crate) struct BitfieldUnit {
    /// Index of this unit within its parent type (the first unit is 1).
    nth: usize,
    /// Size and alignment of this unit's backing storage.
    layout: Layout,
    /// The bitfields packed into this unit.
    bitfields: Vec<Bitfield>,
}
170
171impl BitfieldUnit {
172 pub(crate) fn nth(&self) -> usize {
176 self.nth
177 }
178
179 pub(crate) fn layout(&self) -> Layout {
181 self.layout
182 }
183
184 pub(crate) fn bitfields(&self) -> &[Bitfield] {
186 &self.bitfields
187 }
188}
189
/// A processed field: either a plain data member, or an allocation unit
/// holding one or more bitfields.
#[derive(Debug)]
pub(crate) enum Field {
    /// A normal data member.
    DataMember(FieldData),

    /// An allocation unit of bitfields.
    Bitfields(BitfieldUnit),
}
199
200impl Field {
201 pub(crate) fn layout(&self, ctx: &BindgenContext) -> Option<Layout> {
203 match *self {
204 Field::Bitfields(BitfieldUnit { layout, .. }) => Some(layout),
205 Field::DataMember(ref data) => {
206 ctx.resolve_type(data.ty).layout(ctx)
207 }
208 }
209 }
210}
211
212impl Trace for Field {
213 type Extra = ();
214
215 fn trace<T>(&self, _: &BindgenContext, tracer: &mut T, _: &())
216 where
217 T: Tracer,
218 {
219 match *self {
220 Field::DataMember(ref data) => {
221 tracer.visit_kind(data.ty.into(), EdgeKind::Field);
222 }
223 Field::Bitfields(BitfieldUnit { ref bitfields, .. }) => {
224 for bf in bitfields {
225 tracer.visit_kind(bf.ty().into(), EdgeKind::Field);
226 }
227 }
228 }
229 }
230}
231
impl DotAttributes for Field {
    /// Render this field as rows of a graphviz HTML-like table.
    fn dot_attributes<W>(
        &self,
        ctx: &BindgenContext,
        out: &mut W,
    ) -> io::Result<()>
    where
        W: io::Write,
    {
        match *self {
            Field::DataMember(ref data) => data.dot_attributes(ctx, out),
            Field::Bitfields(BitfieldUnit {
                layout,
                ref bitfields,
                ..
            }) => {
                // Emit the unit header (size/align) as a nested table...
                writeln!(
                    out,
                    r#"<tr>
                      <td>bitfield unit</td>
                      <td>
                        <table border="0">
                          <tr>
                            <td>unit.size</td><td>{}</td>
                          </tr>
                          <tr>
                            <td>unit.align</td><td>{}</td>
                          </tr>
                         "#,
                    layout.size, layout.align
                )?;
                // ...then one row per bitfield inside the unit.
                for bf in bitfields {
                    bf.dot_attributes(ctx, out)?;
                }
                writeln!(out, "</table></td></tr>")
            }
        }
    }
}
271
272impl DotAttributes for FieldData {
273 fn dot_attributes<W>(
274 &self,
275 _ctx: &BindgenContext,
276 out: &mut W,
277 ) -> io::Result<()>
278 where
279 W: io::Write,
280 {
281 writeln!(
282 out,
283 "<tr><td>{}</td><td>{:?}</td></tr>",
284 self.name().unwrap_or("(anonymous)"),
285 self.ty()
286 )
287 }
288}
289
290impl DotAttributes for Bitfield {
291 fn dot_attributes<W>(
292 &self,
293 _ctx: &BindgenContext,
294 out: &mut W,
295 ) -> io::Result<()>
296 where
297 W: io::Write,
298 {
299 writeln!(
300 out,
301 "<tr><td>{} : {}</td><td>{:?}</td></tr>",
302 self.name().unwrap_or("(anonymous)"),
303 self.width(),
304 self.ty()
305 )
306 }
307}
308
/// A single bitfield, living inside a `BitfieldUnit`.
#[derive(Debug)]
pub(crate) struct Bitfield {
    /// Offset of this bitfield's bits from the start of its allocation
    /// unit, in bits.
    offset_into_unit: usize,

    /// The field data backing this bitfield.
    data: FieldData,

    /// The name of the generated Rust getter; assigned later by
    /// `deanonymize_fields` for named bitfields only.
    getter_name: Option<String>,

    /// The name of the generated Rust setter; assigned later by
    /// `deanonymize_fields` for named bitfields only.
    setter_name: Option<String>,
}
329
impl Bitfield {
    /// Construct a bitfield located `offset_into_unit` bits into its
    /// allocation unit.
    ///
    /// Panics if `raw` is not actually a bitfield.
    fn new(offset_into_unit: usize, raw: RawField) -> Bitfield {
        assert!(raw.bitfield_width().is_some());

        Bitfield {
            offset_into_unit,
            data: raw.0,
            getter_name: None,
            setter_name: None,
        }
    }

    /// Offset of this bitfield from the start of its allocation unit, in
    /// bits.
    pub(crate) fn offset_into_unit(&self) -> usize {
        self.offset_into_unit
    }

    /// The width of this bitfield, in bits.
    pub(crate) fn width(&self) -> u32 {
        self.data.bitfield_width().unwrap()
    }

    /// Name of the getter to generate for this bitfield.
    ///
    /// Panics on anonymous bitfields, or if accessor names have not been
    /// assigned yet.
    pub(crate) fn getter_name(&self) -> &str {
        assert!(
            self.name().is_some(),
            "`Bitfield::getter_name` called on anonymous field"
        );
        self.getter_name.as_ref().expect(
            "`Bitfield::getter_name` should only be called after\
            assigning bitfield accessor names",
        )
    }

    /// Name of the setter to generate for this bitfield.
    ///
    /// Panics on anonymous bitfields, or if accessor names have not been
    /// assigned yet.
    pub(crate) fn setter_name(&self) -> &str {
        assert!(
            self.name().is_some(),
            "`Bitfield::setter_name` called on anonymous field"
        );
        self.setter_name.as_ref().expect(
            "`Bitfield::setter_name` should only be called\
            after assigning bitfield accessor names",
        )
    }
}
384
385impl FieldMethods for Bitfield {
386 fn name(&self) -> Option<&str> {
387 self.data.name()
388 }
389
390 fn ty(&self) -> TypeId {
391 self.data.ty()
392 }
393
394 fn comment(&self) -> Option<&str> {
395 self.data.comment()
396 }
397
398 fn bitfield_width(&self) -> Option<u32> {
399 self.data.bitfield_width()
400 }
401
402 fn is_public(&self) -> bool {
403 self.data.is_public()
404 }
405
406 fn annotations(&self) -> &Annotations {
407 self.data.annotations()
408 }
409
410 fn offset(&self) -> Option<usize> {
411 self.data.offset()
412 }
413}
414
/// A raw field as obtained from clang, before consecutive bitfields have
/// been grouped into allocation units.
#[derive(Debug)]
struct RawField(FieldData);
421
422impl RawField {
423 fn new(
425 name: Option<String>,
426 ty: TypeId,
427 comment: Option<String>,
428 annotations: Option<Annotations>,
429 bitfield_width: Option<u32>,
430 public: bool,
431 offset: Option<usize>,
432 ) -> RawField {
433 RawField(FieldData {
434 name,
435 ty,
436 comment,
437 annotations: annotations.unwrap_or_default(),
438 bitfield_width,
439 public,
440 offset,
441 })
442 }
443}
444
445impl FieldMethods for RawField {
446 fn name(&self) -> Option<&str> {
447 self.0.name()
448 }
449
450 fn ty(&self) -> TypeId {
451 self.0.ty()
452 }
453
454 fn comment(&self) -> Option<&str> {
455 self.0.comment()
456 }
457
458 fn bitfield_width(&self) -> Option<u32> {
459 self.0.bitfield_width()
460 }
461
462 fn is_public(&self) -> bool {
463 self.0.is_public()
464 }
465
466 fn annotations(&self) -> &Annotations {
467 self.0.annotations()
468 }
469
470 fn offset(&self) -> Option<usize> {
471 self.0.offset()
472 }
473}
474
/// Convert the ordered sequence of raw fields into processed `Field`s,
/// grouping every run of consecutive bitfields into `BitfieldUnit`s.
///
/// On success returns the fields together with whether any bitfield unit
/// was created. Returns `Err(())` if a bitfield's type layout could not be
/// resolved.
fn raw_fields_to_fields_and_bitfield_units<I>(
    ctx: &BindgenContext,
    raw_fields: I,
    packed: bool,
) -> Result<(Vec<Field>, bool), ()>
where
    I: IntoIterator<Item = RawField>,
{
    let mut raw_fields = raw_fields.into_iter().fuse().peekable();
    let mut fields = vec![];
    let mut bitfield_unit_count = 0;

    loop {
        // First, drain the current run of non-bitfields straight into
        // `fields`. The inner scope ends the `by_ref` borrow before the
        // next adaptor is built.
        {
            let non_bitfields = raw_fields
                .by_ref()
                .peeking_take_while(|f| f.bitfield_width().is_none())
                .map(|f| Field::DataMember(f.0));
            fields.extend(non_bitfields);
        }

        // Then take the following run of bitfields and pack them into
        // allocation units. If the run is empty, the input is exhausted.
        let mut bitfields = raw_fields
            .by_ref()
            .peeking_take_while(|f| f.bitfield_width().is_some())
            .peekable();

        if bitfields.peek().is_none() {
            break;
        }

        bitfields_to_allocation_units(
            ctx,
            &mut bitfield_unit_count,
            &mut fields,
            bitfields,
            packed,
        )?;
    }

    assert!(
        raw_fields.next().is_none(),
        "The above loop should consume all items in `raw_fields`"
    );

    Ok((fields, bitfield_unit_count != 0))
}
532
533fn bitfields_to_allocation_units<E, I>(
536 ctx: &BindgenContext,
537 bitfield_unit_count: &mut usize,
538 fields: &mut E,
539 raw_bitfields: I,
540 packed: bool,
541) -> Result<(), ()>
542where
543 E: Extend<Field>,
544 I: IntoIterator<Item = RawField>,
545{
546 assert!(ctx.collected_typerefs());
547
548 fn flush_allocation_unit<E>(
560 fields: &mut E,
561 bitfield_unit_count: &mut usize,
562 unit_size_in_bits: usize,
563 unit_align_in_bits: usize,
564 bitfields: Vec<Bitfield>,
565 packed: bool,
566 ) where
567 E: Extend<Field>,
568 {
569 *bitfield_unit_count += 1;
570 let align = if packed {
571 1
572 } else {
573 bytes_from_bits_pow2(unit_align_in_bits)
574 };
575 let size = align_to(unit_size_in_bits, 8) / 8;
576 let layout = Layout::new(size, align);
577 fields.extend(Some(Field::Bitfields(BitfieldUnit {
578 nth: *bitfield_unit_count,
579 layout,
580 bitfields,
581 })));
582 }
583
584 let mut max_align = 0;
585 let mut unfilled_bits_in_unit = 0;
586 let mut unit_size_in_bits = 0;
587 let mut unit_align = 0;
588 let mut bitfields_in_unit = vec![];
589
590 const is_ms_struct: bool = false;
593
594 for bitfield in raw_bitfields {
595 let bitfield_width = bitfield.bitfield_width().unwrap() as usize;
596 let bitfield_layout =
597 ctx.resolve_type(bitfield.ty()).layout(ctx).ok_or(())?;
598 let bitfield_size = bitfield_layout.size;
599 let bitfield_align = bitfield_layout.align;
600
601 let mut offset = unit_size_in_bits;
602 if !packed {
603 if is_ms_struct {
604 if unit_size_in_bits != 0 &&
605 (bitfield_width == 0 ||
606 bitfield_width > unfilled_bits_in_unit)
607 {
608 unit_size_in_bits =
611 align_to(unit_size_in_bits, unit_align * 8);
612 flush_allocation_unit(
613 fields,
614 bitfield_unit_count,
615 unit_size_in_bits,
616 unit_align,
617 mem::take(&mut bitfields_in_unit),
618 packed,
619 );
620
621 offset = 0;
624 unit_align = 0;
625 }
626 } else if offset != 0 &&
627 (bitfield_width == 0 ||
628 (offset & (bitfield_align * 8 - 1)) + bitfield_width >
629 bitfield_size * 8)
630 {
631 offset = align_to(offset, bitfield_align * 8);
632 }
633 }
634
635 if bitfield.name().is_some() {
641 max_align = cmp::max(max_align, bitfield_align);
642
643 unit_align = cmp::max(unit_align, bitfield_width);
648 }
649
650 bitfields_in_unit.push(Bitfield::new(offset, bitfield));
656
657 unit_size_in_bits = offset + bitfield_width;
658
659 let data_size = align_to(unit_size_in_bits, bitfield_align * 8);
663 unfilled_bits_in_unit = data_size - unit_size_in_bits;
664 }
665
666 if unit_size_in_bits != 0 {
667 flush_allocation_unit(
669 fields,
670 bitfield_unit_count,
671 unit_size_in_bits,
672 unit_align,
673 bitfields_in_unit,
674 packed,
675 );
676 }
677
678 Ok(())
679}
680
/// The state of a compound type's fields.
///
/// Fields start as `Before` (raw, straight from clang), are converted
/// exactly once into `After` (with bitfields grouped into allocation
/// units), or become `Error` if that conversion failed.
#[derive(Debug)]
enum CompFields {
    /// Raw fields; bitfield units not yet computed.
    Before(Vec<RawField>),
    /// Processed fields.
    After {
        /// The fields, with consecutive bitfields grouped into units.
        fields: Vec<Field>,
        /// Whether any bitfield allocation unit was created.
        has_bitfield_units: bool,
    },
    /// Computing the bitfield allocation units failed.
    Error,
}
697
698impl Default for CompFields {
699 fn default() -> CompFields {
700 CompFields::Before(vec![])
701 }
702}
703
704impl CompFields {
705 fn append_raw_field(&mut self, raw: RawField) {
706 match *self {
707 CompFields::Before(ref mut raws) => {
708 raws.push(raw);
709 }
710 _ => {
711 panic!(
712 "Must not append new fields after computing bitfield allocation units"
713 );
714 }
715 }
716 }
717
718 fn compute_bitfield_units(&mut self, ctx: &BindgenContext, packed: bool) {
719 let raws = match *self {
720 CompFields::Before(ref mut raws) => mem::take(raws),
721 _ => {
722 panic!("Already computed bitfield units");
723 }
724 };
725
726 let result = raw_fields_to_fields_and_bitfield_units(ctx, raws, packed);
727
728 match result {
729 Ok((fields, has_bitfield_units)) => {
730 *self = CompFields::After {
731 fields,
732 has_bitfield_units,
733 };
734 }
735 Err(()) => {
736 *self = CompFields::Error;
737 }
738 }
739 }
740
    /// Give every anonymous field a generated name, and assign the
    /// getter/setter names of named bitfields, avoiding collisions with the
    /// type's method names.
    fn deanonymize_fields(&mut self, ctx: &BindgenContext, methods: &[Method]) {
        let fields = match *self {
            CompFields::After { ref mut fields, .. } => fields,
            // Nothing to do.
            CompFields::Error => return,
            CompFields::Before(_) => {
                panic!("Not yet computed bitfield units.");
            }
        };

        // Does `name` collide with any method, either directly or after
        // rust mangling?
        fn has_method(
            methods: &[Method],
            ctx: &BindgenContext,
            name: &str,
        ) -> bool {
            methods.iter().any(|method| {
                let method_name = ctx.resolve_func(method.signature()).name();
                method_name == name || ctx.rust_mangle(method_name) == name
            })
        }

        struct AccessorNamesPair {
            getter: String,
            setter: String,
        }

        // Precompute accessor names for every *named* bitfield, suffixing
        // them when they would collide with a method name.
        let mut accessor_names: HashMap<String, AccessorNamesPair> = fields
            .iter()
            .flat_map(|field| match *field {
                Field::Bitfields(ref bu) => &*bu.bitfields,
                Field::DataMember(_) => &[],
            })
            .filter_map(|bitfield| bitfield.name())
            .map(|bitfield_name| {
                let bitfield_name = bitfield_name.to_string();
                let getter = {
                    let mut getter =
                        ctx.rust_mangle(&bitfield_name).to_string();
                    if has_method(methods, ctx, &getter) {
                        getter.push_str("_bindgen_bitfield");
                    }
                    getter
                };
                let setter = {
                    let setter = format!("set_{bitfield_name}");
                    let mut setter = ctx.rust_mangle(&setter).to_string();
                    if has_method(methods, ctx, &setter) {
                        setter.push_str("_bindgen_bitfield");
                    }
                    setter
                };
                (bitfield_name, AccessorNamesPair { getter, setter })
            })
            .collect();

        let mut anon_field_counter = 0;
        for field in fields.iter_mut() {
            match *field {
                Field::DataMember(FieldData { ref mut name, .. }) => {
                    if name.is_some() {
                        continue;
                    }

                    // Anonymous data members get `<prefix>1`, `<prefix>2`,
                    // ... in order of appearance.
                    anon_field_counter += 1;
                    *name = Some(format!(
                        "{}{anon_field_counter}",
                        ctx.options().anon_fields_prefix,
                    ));
                }
                Field::Bitfields(ref mut bu) => {
                    for bitfield in &mut bu.bitfields {
                        // Anonymous bitfields get no accessors at all.
                        if bitfield.name().is_none() {
                            continue;
                        }

                        if let Some(AccessorNamesPair { getter, setter }) =
                            accessor_names.remove(bitfield.name().unwrap())
                        {
                            bitfield.getter_name = Some(getter);
                            bitfield.setter_name = Some(setter);
                        }
                    }
                }
            }
        }
    }
827
828 fn flex_array_member(&self, ctx: &BindgenContext) -> Option<TypeId> {
830 let fields = match self {
831 CompFields::Before(_) => panic!("raw fields"),
832 CompFields::After { fields, .. } => fields,
833 CompFields::Error => return None, };
835
836 match fields.last()? {
837 Field::Bitfields(..) => None,
838 Field::DataMember(FieldData { ty, .. }) => ctx
839 .resolve_type(*ty)
840 .is_incomplete_array(ctx)
841 .map(|item| item.expect_type_id(ctx)),
842 }
843 }
844}
845
846impl Trace for CompFields {
847 type Extra = ();
848
849 fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, _: &())
850 where
851 T: Tracer,
852 {
853 match *self {
854 CompFields::Error => {}
855 CompFields::Before(ref fields) => {
856 for f in fields {
857 tracer.visit_kind(f.ty().into(), EdgeKind::Field);
858 }
859 }
860 CompFields::After { ref fields, .. } => {
861 for f in fields {
862 f.trace(context, tracer, &());
863 }
864 }
865 }
866 }
867}
868
/// The data shared by every kind of field.
#[derive(Clone, Debug)]
pub(crate) struct FieldData {
    /// The field's name, or `None` if it is anonymous.
    name: Option<String>,

    /// The field's type.
    ty: TypeId,

    /// The documentation comment attached to the field, if any.
    comment: Option<String>,

    /// Annotations extracted for this field.
    annotations: Annotations,

    /// If this field is a bitfield, its width in bits.
    bitfield_width: Option<u32>,

    /// Whether the field is publicly accessible.
    public: bool,

    /// The field's offset within its parent, if known.
    offset: Option<usize>,
}
893
// Trivial projections of the stored data.
impl FieldMethods for FieldData {
    fn name(&self) -> Option<&str> {
        self.name.as_deref()
    }

    fn ty(&self) -> TypeId {
        self.ty
    }

    fn comment(&self) -> Option<&str> {
        self.comment.as_deref()
    }

    fn bitfield_width(&self) -> Option<u32> {
        self.bitfield_width
    }

    fn is_public(&self) -> bool {
        self.public
    }

    fn annotations(&self) -> &Annotations {
        &self.annotations
    }

    fn offset(&self) -> Option<usize> {
        self.offset
    }
}
923
/// How a base class is inherited.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum BaseKind {
    /// Normal (non-virtual) inheritance.
    Normal,
    /// Virtual inheritance.
    Virtual,
}
940
/// A base class of a compound type.
#[derive(Clone, Debug)]
pub(crate) struct Base {
    /// The type of this base class.
    pub(crate) ty: TypeId,
    /// How this base is inherited.
    pub(crate) kind: BaseKind,
    /// The name used for the generated field holding this base.
    pub(crate) field_name: String,
    /// Whether this base is inherited with public access.
    pub(crate) is_pub: bool,
}
953
954impl Base {
955 pub(crate) fn is_virtual(&self) -> bool {
957 self.kind == BaseKind::Virtual
958 }
959
960 pub(crate) fn requires_storage(&self, ctx: &BindgenContext) -> bool {
962 if self.is_virtual() {
967 return false;
968 }
969
970 if self.ty.is_zero_sized(ctx) {
974 return false;
975 }
976
977 true
978 }
979
980 pub(crate) fn is_public(&self) -> bool {
982 self.is_pub
983 }
984}
985
/// A compound type: a struct or union, together with its fields, methods,
/// bases, inner declarations and assorted parsing flags.
#[derive(Debug)]
pub(crate) struct CompInfo {
    /// Whether this is a struct or a union.
    kind: CompKind,

    /// The fields, possibly not yet grouped into bitfield units.
    fields: CompFields,

    /// The template type parameters of this type, if any.
    template_params: Vec<TypeId>,

    /// The methods declared on this type.
    methods: Vec<Method>,

    /// The constructors of this type.
    constructors: Vec<FunctionId>,

    /// The destructor of this type, with its kind, if any.
    destructor: Option<(MethodKind, FunctionId)>,

    /// The base classes of this type.
    base_members: Vec<Base>,

    /// The types declared inside this type.
    inner_types: Vec<TypeId>,

    /// The static variables declared inside this type.
    inner_vars: Vec<VarId>,

    /// Whether this type declares a virtual method itself (set while
    /// parsing methods and virtual bases).
    has_own_virtual_method: bool,

    /// Whether this type declares its own destructor.
    has_destructor: bool,

    /// Whether this type has a non-empty base class.
    has_nonempty_base: bool,

    /// Whether this type has non-type template parameters, which bindgen
    /// cannot represent.
    has_non_type_template_params: bool,

    /// Whether a bitfield was seen whose width clang could not evaluate.
    has_unevaluable_bit_field_width: bool,

    /// Whether a packed attribute was found on the declaration.
    packed_attr: bool,

    /// Whether an unexposed (unknown) attribute was found on this type or
    /// one of its fields.
    found_unknown_attr: bool,

    /// Whether this type was only ever seen as a forward declaration.
    is_forward_declaration: bool,
}
1071
1072impl CompInfo {
1073 pub(crate) fn new(kind: CompKind) -> Self {
1075 CompInfo {
1076 kind,
1077 fields: CompFields::default(),
1078 template_params: vec![],
1079 methods: vec![],
1080 constructors: vec![],
1081 destructor: None,
1082 base_members: vec![],
1083 inner_types: vec![],
1084 inner_vars: vec![],
1085 has_own_virtual_method: false,
1086 has_destructor: false,
1087 has_nonempty_base: false,
1088 has_non_type_template_params: false,
1089 has_unevaluable_bit_field_width: false,
1090 packed_attr: false,
1091 found_unknown_attr: false,
1092 is_forward_declaration: false,
1093 }
1094 }
1095
1096 pub(crate) fn layout(&self, ctx: &BindgenContext) -> Option<Layout> {
1105 if self.kind == CompKind::Struct {
1107 return None;
1108 }
1109
1110 if self.is_forward_declaration() {
1113 return None;
1114 }
1115
1116 if !self.has_fields() {
1118 return None;
1119 }
1120
1121 let mut max_size = 0;
1122 let mut max_align = 1;
1124 self.each_known_field_layout(ctx, |layout| {
1125 max_size = cmp::max(max_size, layout.size);
1126 max_align = cmp::max(max_align, layout.align);
1127 });
1128
1129 Some(Layout::new(max_size, max_align))
1130 }
1131
1132 pub(crate) fn fields(&self) -> &[Field] {
1134 match self.fields {
1135 CompFields::Error => &[],
1136 CompFields::After { ref fields, .. } => fields,
1137 CompFields::Before(..) => {
1138 panic!("Should always have computed bitfield units first");
1139 }
1140 }
1141 }
1142
1143 pub(crate) fn flex_array_member(
1145 &self,
1146 ctx: &BindgenContext,
1147 ) -> Option<TypeId> {
1148 self.fields.flex_array_member(ctx)
1149 }
1150
1151 fn has_fields(&self) -> bool {
1152 match self.fields {
1153 CompFields::Error => false,
1154 CompFields::After { ref fields, .. } => !fields.is_empty(),
1155 CompFields::Before(ref raw_fields) => !raw_fields.is_empty(),
1156 }
1157 }
1158
1159 fn each_known_field_layout(
1160 &self,
1161 ctx: &BindgenContext,
1162 mut callback: impl FnMut(Layout),
1163 ) {
1164 match self.fields {
1165 CompFields::Error => {}
1166 CompFields::After { ref fields, .. } => {
1167 for field in fields {
1168 if let Some(layout) = field.layout(ctx) {
1169 callback(layout);
1170 }
1171 }
1172 }
1173 CompFields::Before(ref raw_fields) => {
1174 for field in raw_fields {
1175 let field_ty = ctx.resolve_type(field.0.ty);
1176 if let Some(layout) = field_ty.layout(ctx) {
1177 callback(layout);
1178 }
1179 }
1180 }
1181 }
1182 }
1183
1184 fn has_bitfields(&self) -> bool {
1185 match self.fields {
1186 CompFields::Error => false,
1187 CompFields::After {
1188 has_bitfield_units, ..
1189 } => has_bitfield_units,
1190 CompFields::Before(_) => {
1191 panic!("Should always have computed bitfield units first");
1192 }
1193 }
1194 }
1195
1196 pub(crate) fn has_too_large_bitfield_unit(&self) -> bool {
1200 if !self.has_bitfields() {
1201 return false;
1202 }
1203 self.fields().iter().any(|field| match *field {
1204 Field::DataMember(..) => false,
1205 Field::Bitfields(ref unit) => {
1206 unit.layout.size > RUST_DERIVE_IN_ARRAY_LIMIT
1207 }
1208 })
1209 }
1210
    /// Whether this type has non-type template parameters, which bindgen
    /// cannot represent.
    pub(crate) fn has_non_type_template_params(&self) -> bool {
        self.has_non_type_template_params
    }

    /// Whether this type declares a virtual method itself (as opposed to
    /// only inheriting one).
    pub(crate) fn has_own_virtual_method(&self) -> bool {
        self.has_own_virtual_method
    }

    /// Whether this type declares its own destructor.
    pub(crate) fn has_own_destructor(&self) -> bool {
        self.has_destructor
    }

    /// The methods declared on this type.
    pub(crate) fn methods(&self) -> &[Method] {
        &self.methods
    }

    /// The constructors of this type.
    pub(crate) fn constructors(&self) -> &[FunctionId] {
        &self.constructors
    }

    /// The destructor of this type, with its kind, if any.
    pub(crate) fn destructor(&self) -> Option<(MethodKind, FunctionId)> {
        self.destructor
    }

    /// Whether this is a struct or a union.
    pub(crate) fn kind(&self) -> CompKind {
        self.kind
    }

    /// Is this a union?
    pub(crate) fn is_union(&self) -> bool {
        self.kind() == CompKind::Union
    }

    /// The base classes of this type.
    pub(crate) fn base_members(&self) -> &[Base] {
        &self.base_members
    }
1257
    /// Parse a `CompInfo` from a clang type (and, optionally, the cursor
    /// it was referenced from), walking the declaration's children to
    /// collect fields, methods, bases, inner types and parsing flags.
    pub(crate) fn from_ty(
        potential_id: ItemId,
        ty: &clang::Type,
        location: Option<clang::Cursor>,
        ctx: &mut BindgenContext,
    ) -> Result<Self, ParseError> {
        use clang_sys::*;
        assert!(
            ty.template_args().is_none(),
            "We handle template instantiations elsewhere"
        );

        // Prefer the type's own declaration cursor; fall back to the use
        // site when the declaration isn't a composite kind.
        let mut cursor = ty.declaration();
        let mut kind = Self::kind_from_cursor(&cursor);
        if kind.is_err() {
            if let Some(location) = location {
                kind = Self::kind_from_cursor(&location);
                cursor = location;
            }
        }

        let kind = kind?;

        debug!("CompInfo::from_ty({kind:?}, {cursor:?})");

        let mut ci = CompInfo::new(kind);
        ci.is_forward_declaration =
            location.map_or(true, |cur| match cur.kind() {
                CXCursor_ParmDecl => true,
                CXCursor_StructDecl | CXCursor_UnionDecl |
                CXCursor_ClassDecl => !cur.is_definition(),
                _ => false,
            });

        // When an anonymous inner struct/union is seen, defer deciding
        // whether it is itself an unnamed field until we see what follows.
        let mut maybe_anonymous_struct_field = None;
        cursor.visit(|cur| {
            if cur.kind() != CXCursor_FieldDecl {
                if let Some((ty, clang_ty, public, offset)) =
                    maybe_anonymous_struct_field.take()
                {
                    // If the anonymous type is immediately typedef'd to
                    // itself, it isn't a field; otherwise materialize it
                    // as an unnamed data member.
                    if cur.kind() == CXCursor_TypedefDecl &&
                        cur.typedef_type().unwrap().canonical_type() ==
                            clang_ty
                    {
                    } else {
                        let field = RawField::new(
                            None, ty, None, None, None, public, offset,
                        );
                        ci.fields.append_raw_field(field);
                    }
                }
            }

            match cur.kind() {
                CXCursor_FieldDecl => {
                    if let Some((ty, clang_ty, public, offset)) =
                        maybe_anonymous_struct_field.take()
                    {
                        // Only emit the pending anonymous type as its own
                        // field if this field declaration doesn't already
                        // use it as its type.
                        let mut used = false;
                        cur.visit(|child| {
                            if child.cur_type() == clang_ty {
                                used = true;
                            }
                            CXChildVisit_Continue
                        });

                        if !used {
                            let field = RawField::new(
                                None, ty, None, None, None, public, offset,
                            );
                            ci.fields.append_raw_field(field);
                        }
                    }

                    let bit_width = if cur.is_bit_field() {
                        let width = cur.bit_width();

                        // A bitfield whose width clang can't evaluate
                        // cannot be laid out; flag it and stop visiting.
                        if width.is_none() {
                            ci.has_unevaluable_bit_field_width = true;
                            return CXChildVisit_Break;
                        }

                        width
                    } else {
                        None
                    };

                    let field_type = Item::from_ty_or_ref(
                        cur.cur_type(),
                        cur,
                        Some(potential_id),
                        ctx,
                    );

                    let comment = cur.raw_comment();
                    let annotations = Annotations::new(&cur);
                    let name = cur.spelling();
                    let is_public = cur.public_accessible();
                    let offset = cur.offset_of_field().ok();

                    // The name can be empty for (anonymous) bitfields only.
                    assert!(
                        !name.is_empty() || bit_width.is_some(),
                        "Empty field name?"
                    );

                    let name = if name.is_empty() { None } else { Some(name) };

                    let field = RawField::new(
                        name,
                        field_type,
                        comment,
                        annotations,
                        bit_width,
                        is_public,
                        offset,
                    );
                    ci.fields.append_raw_field(field);

                    // libclang doesn't tell us which attribute this is;
                    // just record that we saw one.
                    cur.visit(|cur| {
                        if cur.kind() == CXCursor_UnexposedAttr {
                            ci.found_unknown_attr = true;
                        }
                        CXChildVisit_Continue
                    });
                }
                CXCursor_UnexposedAttr => {
                    ci.found_unknown_attr = true;
                }
                CXCursor_EnumDecl |
                CXCursor_TypeAliasDecl |
                CXCursor_TypeAliasTemplateDecl |
                CXCursor_TypedefDecl |
                CXCursor_StructDecl |
                CXCursor_UnionDecl |
                CXCursor_ClassTemplate |
                CXCursor_ClassDecl => {
                    // Only treat it as an inner type when it's declared
                    // (or defined) directly inside this type.
                    let is_inner_struct =
                        cur.semantic_parent() == cursor || cur.is_definition();
                    if !is_inner_struct {
                        return CXChildVisit_Continue;
                    }

                    let inner = Item::parse(cur, Some(potential_id), ctx)
                        .expect("Inner ClassDecl");

                    // The inner item may not resolve (yet); only record it
                    // when it does.
                    if ctx.resolve_item_fallible(inner).is_some() {
                        let inner = inner.expect_type_id(ctx);

                        ci.inner_types.push(inner);

                        // An anonymous inner struct/union (but not an
                        // enum) may really be an unnamed field; remember
                        // it for the deferred check above.
                        if cur.is_anonymous() && cur.kind() != CXCursor_EnumDecl
                        {
                            let ty = cur.cur_type();
                            let public = cur.public_accessible();
                            let offset = cur.offset_of_field().ok();

                            maybe_anonymous_struct_field =
                                Some((inner, ty, public, offset));
                        }
                    }
                }
                CXCursor_PackedAttr => {
                    ci.packed_attr = true;
                }
                CXCursor_TemplateTypeParameter => {
                    let param = Item::type_param(None, cur, ctx).expect(
                        "Item::type_param shouldn't fail when pointing \
                         at a TemplateTypeParameter",
                    );
                    ci.template_params.push(param);
                }
                CXCursor_CXXBaseSpecifier => {
                    let is_virtual_base = cur.is_virtual_base();
                    ci.has_own_virtual_method |= is_virtual_base;

                    let kind = if is_virtual_base {
                        BaseKind::Virtual
                    } else {
                        BaseKind::Normal
                    };

                    // Bases are stored as `_base`, `_base_1`, `_base_2`...
                    let field_name = match ci.base_members.len() {
                        0 => "_base".into(),
                        n => format!("_base_{n}"),
                    };
                    let type_id =
                        Item::from_ty_or_ref(cur.cur_type(), cur, None, ctx);
                    ci.base_members.push(Base {
                        ty: type_id,
                        kind,
                        field_name,
                        is_pub: cur.access_specifier() == CX_CXXPublic,
                    });
                }
                CXCursor_Constructor | CXCursor_Destructor |
                CXCursor_CXXMethod => {
                    let is_virtual = cur.method_is_virtual();
                    let is_static = cur.method_is_static();
                    debug_assert!(!(is_static && is_virtual), "How?");

                    ci.has_destructor |= cur.kind() == CXCursor_Destructor;
                    ci.has_own_virtual_method |= is_virtual;

                    // Methods of templated types are skipped after the
                    // flags above have been recorded.
                    if !ci.template_params.is_empty() {
                        return CXChildVisit_Continue;
                    }

                    // Only keep the member if it parsed into a function
                    // item; otherwise skip it entirely.
                    let signature =
                        match Item::parse(cur, Some(potential_id), ctx) {
                            Ok(item)
                                if ctx
                                    .resolve_item(item)
                                    .kind()
                                    .is_function() =>
                            {
                                item
                            }
                            _ => return CXChildVisit_Continue,
                        };

                    let signature = signature.expect_function_id(ctx);

                    match cur.kind() {
                        CXCursor_Constructor => {
                            ci.constructors.push(signature);
                        }
                        CXCursor_Destructor => {
                            let kind = if is_virtual {
                                MethodKind::VirtualDestructor {
                                    pure_virtual: cur.method_is_pure_virtual(),
                                }
                            } else {
                                MethodKind::Destructor
                            };
                            ci.destructor = Some((kind, signature));
                        }
                        CXCursor_CXXMethod => {
                            let is_const = cur.method_is_const();
                            let method_kind = if is_static {
                                MethodKind::Static
                            } else if is_virtual {
                                MethodKind::Virtual {
                                    pure_virtual: cur.method_is_pure_virtual(),
                                }
                            } else {
                                MethodKind::Normal
                            };

                            let method =
                                Method::new(method_kind, signature, is_const);

                            ci.methods.push(method);
                        }
                        _ => unreachable!("How can we see this here?"),
                    }
                }
                CXCursor_NonTypeTemplateParameter => {
                    ci.has_non_type_template_params = true;
                }
                CXCursor_VarDecl => {
                    // Only externally-visible statics become inner vars.
                    let linkage = cur.linkage();
                    if linkage != CXLinkage_External &&
                        linkage != CXLinkage_UniqueExternal
                    {
                        return CXChildVisit_Continue;
                    }

                    let visibility = cur.visibility();
                    if visibility != CXVisibility_Default {
                        return CXChildVisit_Continue;
                    }

                    if let Ok(item) = Item::parse(cur, Some(potential_id), ctx)
                    {
                        ci.inner_vars.push(item.as_var_id_unchecked());
                    }
                }
                // Intentionally ignored cursor kinds.
                CXCursor_CXXAccessSpecifier |
                CXCursor_CXXFinalAttr |
                CXCursor_FunctionTemplate |
                CXCursor_ConversionFunction => {}
                _ => {
                    warn!(
                        "unhandled comp member `{}` (kind {:?}) in `{}` ({})",
                        cur.spelling(),
                        clang::kind_to_str(cur.kind()),
                        cursor.spelling(),
                        cur.location()
                    );
                }
            }
            CXChildVisit_Continue
        });

        // A pending anonymous struct with nothing after it is a field.
        if let Some((ty, _, public, offset)) = maybe_anonymous_struct_field {
            let field =
                RawField::new(None, ty, None, None, None, public, offset);
            ci.fields.append_raw_field(field);
        }

        Ok(ci)
    }
1602
1603 fn kind_from_cursor(
1604 cursor: &clang::Cursor,
1605 ) -> Result<CompKind, ParseError> {
1606 use clang_sys::*;
1607 Ok(match cursor.kind() {
1608 CXCursor_UnionDecl => CompKind::Union,
1609 CXCursor_ClassDecl | CXCursor_StructDecl => CompKind::Struct,
1610 CXCursor_CXXBaseSpecifier |
1611 CXCursor_ClassTemplatePartialSpecialization |
1612 CXCursor_ClassTemplate => match cursor.template_kind() {
1613 CXCursor_UnionDecl => CompKind::Union,
1614 _ => CompKind::Struct,
1615 },
1616 _ => {
1617 warn!("Unknown kind for comp type: {cursor:?}");
1618 return Err(ParseError::Continue);
1619 }
1620 })
1621 }
1622
    /// The types declared inside this type.
    pub(crate) fn inner_types(&self) -> &[TypeId] {
        &self.inner_types
    }

    /// The static variables declared inside this type.
    pub(crate) fn inner_vars(&self) -> &[VarId] {
        &self.inner_vars
    }

    /// Whether an unexposed (unknown) attribute was found while parsing
    /// this type or its fields.
    pub(crate) fn found_unknown_attr(&self) -> bool {
        self.found_unknown_attr
    }
1639
    /// Whether this type must be laid out packed: either a packed
    /// attribute was seen, or the layout clang reports is tighter than the
    /// fields' natural alignment.
    pub(crate) fn is_packed(
        &self,
        ctx: &BindgenContext,
        layout: Option<&Layout>,
    ) -> bool {
        if self.packed_attr {
            return true;
        }

        if let Some(parent_layout) = layout {
            // A field with stricter alignment than the whole type implies
            // packing even without an explicit attribute (e.g. via
            // `#pragma pack`).
            let mut packed = false;
            self.each_known_field_layout(ctx, |layout| {
                packed = packed || layout.align > parent_layout.align;
            });
            if packed {
                info!("Found a struct that was defined within `#pragma packed(...)`");
                return true;
            }

            // A vtable pointer would normally force pointer alignment, so
            // an align of 1 here also implies packing.
            if self.has_own_virtual_method && parent_layout.align == 1 {
                return true;
            }
        }

        false
    }
1669
1670 pub(crate) fn already_packed(&self, ctx: &BindgenContext) -> Option<bool> {
1675 let mut total_size: usize = 0;
1676
1677 for field in self.fields() {
1678 let layout = field.layout(ctx)?;
1679
1680 if layout.align != 0 && total_size % layout.align != 0 {
1681 return Some(false);
1682 }
1683
1684 total_size += layout.size;
1685 }
1686
1687 Some(true)
1688 }
1689
    /// Whether this type was only ever seen as a forward declaration.
    pub(crate) fn is_forward_declaration(&self) -> bool {
        self.is_forward_declaration
    }

    /// Compute the bitfield allocation units for this type's fields,
    /// transitioning them from raw to processed. Must be called exactly
    /// once, after the packedness of the type can be determined.
    pub(crate) fn compute_bitfield_units(
        &mut self,
        ctx: &BindgenContext,
        layout: Option<&Layout>,
    ) {
        let packed = self.is_packed(ctx, layout);
        self.fields.compute_bitfield_units(ctx, packed);
    }

    /// Assign names to this type's anonymous fields and accessor names to
    /// its bitfields. Must be called after `compute_bitfield_units`.
    pub(crate) fn deanonymize_fields(&mut self, ctx: &BindgenContext) {
        self.fields.deanonymize_fields(ctx, &self.methods);
    }
1709
1710 pub(crate) fn is_rust_union(
1720 &self,
1721 ctx: &BindgenContext,
1722 layout: Option<&Layout>,
1723 name: &str,
1724 ) -> (bool, bool) {
1725 if !self.is_union() {
1726 return (false, false);
1727 }
1728
1729 if !ctx.options().untagged_union {
1730 return (false, false);
1731 }
1732
1733 if self.is_forward_declaration() {
1734 return (false, false);
1735 }
1736
1737 let union_style = if ctx.options().bindgen_wrapper_union.matches(name) {
1738 NonCopyUnionStyle::BindgenWrapper
1739 } else if ctx.options().manually_drop_union.matches(name) {
1740 NonCopyUnionStyle::ManuallyDrop
1741 } else {
1742 ctx.options().default_non_copy_union_style
1743 };
1744
1745 let all_can_copy = self.fields().iter().all(|f| match *f {
1746 Field::DataMember(ref field_data) => {
1747 field_data.ty().can_derive_copy(ctx)
1748 }
1749 Field::Bitfields(_) => true,
1750 });
1751
1752 if !all_can_copy && union_style == NonCopyUnionStyle::BindgenWrapper {
1753 return (false, false);
1754 }
1755
1756 if layout.is_some_and(|l| l.size == 0) {
1757 return (false, false);
1758 }
1759
1760 (true, all_can_copy)
1761 }
1762}
1763
1764impl DotAttributes for CompInfo {
1765 fn dot_attributes<W>(
1766 &self,
1767 ctx: &BindgenContext,
1768 out: &mut W,
1769 ) -> io::Result<()>
1770 where
1771 W: io::Write,
1772 {
1773 writeln!(out, "<tr><td>CompKind</td><td>{:?}</td></tr>", self.kind)?;
1774
1775 if self.has_own_virtual_method {
1776 writeln!(out, "<tr><td>has_vtable</td><td>true</td></tr>")?;
1777 }
1778
1779 if self.has_destructor {
1780 writeln!(out, "<tr><td>has_destructor</td><td>true</td></tr>")?;
1781 }
1782
1783 if self.has_nonempty_base {
1784 writeln!(out, "<tr><td>has_nonempty_base</td><td>true</td></tr>")?;
1785 }
1786
1787 if self.has_non_type_template_params {
1788 writeln!(
1789 out,
1790 "<tr><td>has_non_type_template_params</td><td>true</td></tr>"
1791 )?;
1792 }
1793
1794 if self.packed_attr {
1795 writeln!(out, "<tr><td>packed_attr</td><td>true</td></tr>")?;
1796 }
1797
1798 if self.is_forward_declaration {
1799 writeln!(
1800 out,
1801 "<tr><td>is_forward_declaration</td><td>true</td></tr>"
1802 )?;
1803 }
1804
1805 if !self.fields().is_empty() {
1806 writeln!(out, r#"<tr><td>fields</td><td><table border="0">"#)?;
1807 for field in self.fields() {
1808 field.dot_attributes(ctx, out)?;
1809 }
1810 writeln!(out, "</table></td></tr>")?;
1811 }
1812
1813 Ok(())
1814 }
1815}
1816
1817impl IsOpaque for CompInfo {
1818 type Extra = Option<Layout>;
1819
1820 fn is_opaque(&self, ctx: &BindgenContext, layout: &Option<Layout>) -> bool {
1821 if self.has_non_type_template_params ||
1822 self.has_unevaluable_bit_field_width
1823 {
1824 return true;
1825 }
1826
1827 if let CompFields::Error = self.fields {
1832 return true;
1833 }
1834
1835 if self.fields().iter().any(|f| match *f {
1839 Field::DataMember(_) => false,
1840 Field::Bitfields(ref unit) => unit.bitfields().iter().any(|bf| {
1841 let bitfield_layout = ctx
1842 .resolve_type(bf.ty())
1843 .layout(ctx)
1844 .expect("Bitfield without layout? Gah!");
1845 bf.width() / 8 > bitfield_layout.size as u32
1846 }),
1847 }) {
1848 return true;
1849 }
1850
1851 if !ctx.options().rust_features().repr_packed_n {
1852 if self.is_packed(ctx, layout.as_ref()) &&
1858 layout.map_or(false, |l| l.align > 1)
1859 {
1860 warn!("Found a type that is both packed and aligned to greater than \
1861 1; Rust before version 1.33 doesn't have `#[repr(packed(N))]`, so we \
1862 are treating it as opaque. You may wish to set bindgen's rust target \
1863 version to 1.33 or later to enable `#[repr(packed(N))]` support.");
1864 return true;
1865 }
1866 }
1867
1868 false
1869 }
1870}
1871
1872impl TemplateParameters for CompInfo {
1873 fn self_template_params(&self, _ctx: &BindgenContext) -> Vec<TypeId> {
1874 self.template_params.clone()
1875 }
1876}
1877
1878impl Trace for CompInfo {
1879 type Extra = Item;
1880
1881 fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, item: &Item)
1882 where
1883 T: Tracer,
1884 {
1885 for p in item.all_template_params(context) {
1886 tracer.visit_kind(p.into(), EdgeKind::TemplateParameterDefinition);
1887 }
1888
1889 for ty in self.inner_types() {
1890 tracer.visit_kind(ty.into(), EdgeKind::InnerType);
1891 }
1892
1893 for &var in self.inner_vars() {
1894 tracer.visit_kind(var.into(), EdgeKind::InnerVar);
1895 }
1896
1897 for method in self.methods() {
1898 tracer.visit_kind(method.signature.into(), EdgeKind::Method);
1899 }
1900
1901 if let Some((_kind, signature)) = self.destructor() {
1902 tracer.visit_kind(signature.into(), EdgeKind::Destructor);
1903 }
1904
1905 for ctor in self.constructors() {
1906 tracer.visit_kind(ctor.into(), EdgeKind::Constructor);
1907 }
1908
1909 if item.is_opaque(context, &()) {
1912 return;
1913 }
1914
1915 for base in self.base_members() {
1916 tracer.visit_kind(base.ty.into(), EdgeKind::BaseMember);
1917 }
1918
1919 self.fields.trace(context, tracer, &());
1920 }
1921}