@@ -670,3 +670,224 @@ test "Extended C ABI casting" {
670
670
try testing .expect (@TypeOf (Macros .L_SUFFIX (math .maxInt (c_long ) + 1 )) == c_longlong ); // comptime_int -> c_longlong
671
671
}
672
672
}
673
+
674
/// Per-ABI rules for laying out C bitfields.
const BitfieldEmulation = struct {
    /// By default the bits are allocated from LSB to MSB
    /// (follows Zig's packed struct and most ABIs).
    /// Set to true to allocate from MSB to LSB.
    reverse_bits: bool,
    /// Most ABIs start a new storage unit after an unnamed zero-bit-width bitfield.
    /// Some ABIs ignore that; set to false for those.
    unnamed_void_boundary: bool,
    /// Some ABIs allow a bitfield to straddle storage units.
    /// Some ABIs, like MSVC, don't straddle; set to false for those.
    straddle: bool,
    /// Also called 'steal padding'.
    /// This option allows the next field to steal space from the previous padding.
    collapse_padding: bool,

    /// Returns the bitfield layout rules for `target`,
    /// or `null` when the target architecture is unsupported.
    fn fromTarget(target: std.Target) ?BitfieldEmulation {
        return switch (target.cpu.arch) {
            .x86_64, .x86 => .{
                .reverse_bits = false,
                .unnamed_void_boundary = true,
                .straddle = false,
                // MSVC does not reuse tail padding; System V-style ABIs do.
                .collapse_padding = switch (target.os.tag) {
                    .windows => false,
                    else => true,
                },
            },
            .aarch64 => .{
                .reverse_bits = false,
                .unnamed_void_boundary = true,
                .straddle = false,
                .collapse_padding = true,
            },
            else => null,
        };
    }

    /// Returns a copy of `base` where every field that also exists in `apply`
    /// (an anonymous struct literal) overrides the corresponding value.
    fn merge(base: BitfieldEmulation, apply: anytype) BitfieldEmulation {
        var copy = base;
        // `inline for` guarantees unrolling, so `@hasField`/`@field` always
        // receive comptime-known names regardless of the calling context.
        inline for (std.meta.fieldNames(@This())) |name| {
            if (@hasField(@TypeOf(apply), name)) {
                @field(copy, name) = @field(apply, name);
            }
        }
        return copy;
    }
};
720
+
721
/// Describes one member of a C struct that may contain bitfields.
/// Consumed by `EmulateBitfieldStruct`.
pub const Bitfield = struct {
    /// The field name.
    name: [:0]const u8,
    /// The actual type of the field. For a bitfield,
    /// it's the bit-sized unsigned int.
    ///
    /// Like in C `unsigned field0: 1`, the type is `u1`.
    type: type,
    /// The backing integer (storage unit) for this field.
    /// `null` marks a regular, non-bitfield member.
    ///
    /// Like in C `unsigned field0: 1`, the backing integer is `c_uint`.
    backing_integer: ?type = null,
    /// If the field is a pointer, it will be treated as `usize` and we avoid accessing the type.
    ///
    /// This helps to avoid the dependency loop problem.
    is_pointer: bool = false,
};
738
+
739
/// Build an auto-named padding field of `bitsize` bits.
/// `fieldNameCount` disambiguates successive padding fields in one struct;
/// the generated name also records the padding width for debugging.
fn makePaddingField(comptime bitsize: comptime_int, fieldNameCount: comptime_int) std.builtin.Type.StructField {
    const generated_name = std.fmt.comptimePrint(" pad_{},+{}b", .{ fieldNameCount, bitsize });
    return makePaddingFieldWithName(bitsize, generated_name);
}
742
+
743
/// Build a zero-defaulted padding field named `fieldName` occupying exactly
/// `bitsize` bits, typed as an unsigned integer of that width.
fn makePaddingFieldWithName(comptime bitsize: comptime_int, fieldName: [:0]const u8) std.builtin.Type.StructField {
    const PadInt = std.meta.Int(.unsigned, bitsize);
    return .{
        .name = fieldName,
        .type = PadInt,
        .default_value = &std.mem.zeroes(PadInt),
        .is_comptime = false,
        // 0 = natural packed-struct placement, no forced alignment.
        .alignment = 0,
    };
}
756
+
757
/// Reports whether `field` is one of the synthesized padding fields,
/// recognized by the name prefix emitted by `makePaddingField`.
/// A `null` field is never padding.
fn isPaddingField(field: ?*const std.builtin.Type.StructField) bool {
    const f = field orelse return false;
    return std.mem.startsWith(u8, f.name, " pad_");
}
760
+
761
/// Translate a packed struct type to adapt the C bitfields on the target platform.
///
/// If the target platform is unsupported, an opaque type will be returned.
///
/// `fields` is the struct definition.
/// `modCfg` is the configuration accepted by `BitfieldEmulation.merge`.
///
/// Be advised that the bitfields have different representation ranges in different ABIs.
/// This function assumes all bitfields are unsigned.
pub fn EmulateBitfieldStruct(comptime fields: []const Bitfield, comptime modCfg: anytype) type {
    const cfg = if (BitfieldEmulation.fromTarget(builtin.target)) |cfg|
        cfg.merge(modCfg)
    else {
        return opaque {};
    };

    // TODO: implement reverse_bits
    if (cfg.reverse_bits) @compileError("TODO: reverse_bit is not implemented");

    // Worst case: every input field is preceded by one synthesized padding field.
    comptime var finals: std.BoundedArray(std.builtin.Type.StructField, fields.len * 2) = .{};
    // Bits still available in the currently open bitfield storage unit.
    comptime var leftBitWidth = 0;
    // Counter used to generate unique padding-field names.
    comptime var padFieldCount = 0;
    // Most recently appended field; inspected when collapsing padding.
    comptime var lastField: ?*std.builtin.Type.StructField = null;
    // The used space in bits.
    // NOTE(review): `offset` is compared/adjusted with byte-based quantities
    // below (`@alignOf`, `stolePadding`) — confirm the intended unit.
    comptime var offset = 0;

    for (fields) |field| {
        // An extern struct cannot be a member of a packed struct; bail out.
        if (comptime !field.is_pointer and @typeInfo(field.type) == .@"struct" and @typeInfo(field.type).@"struct".layout == .@"extern") {
            return opaque {};
        }
        if (field.backing_integer) |BackingInt| {
            // Bitfield member: carve bits out of the current storage unit,
            // opening a new (aligned) unit when it does not fit.
            const requiredBits = @typeInfo(field.type).int.bits;
            if (leftBitWidth < requiredBits) {
                if (!cfg.straddle and (leftBitWidth > 0)) {
                    // add padding to use a new unit for the next field
                    finals.appendAssumeCapacity(makePaddingField(leftBitWidth, padFieldCount));
                    lastField = &finals.slice()[finals.len - 1];
                    padFieldCount += 1;
                    leftBitWidth = 0;
                }

                if (offset % @alignOf(BackingInt) != 0) {
                    // Align the new storage unit for its backing integer.
                    const padding = (@divTrunc(offset, @alignOf(BackingInt)) + 1) * @alignOf(BackingInt) - offset;
                    offset += padding;

                    finals.appendAssumeCapacity(makePaddingField(padding * 8, padFieldCount));
                    lastField = &finals.slice()[finals.len - 1];
                    padFieldCount += 1;
                } else if (isPaddingField(lastField) and cfg.collapse_padding) {
                    // Maybe we need to steal padding
                    const lfield = lastField.?;
                    const mlp = @divTrunc(@bitSizeOf(lfield.type), @alignOf(BackingInt) * 8);
                    if (mlp >= 1) {
                        const stolePadding = @alignOf(BackingInt) * mlp;
                        const nsize = @bitSizeOf(lfield.type) - (stolePadding * 8);
                        // Shrink the previous padding field in place.
                        // (Fixed: was `fields.set(fields.len - 1, ...)`, but
                        // `fields` is the read-only input slice — the padding
                        // field lives in `finals`, mirroring the branch below.)
                        finals.set(finals.len - 1, makePaddingFieldWithName(
                            nsize,
                            std.fmt.comptimePrint("{s},-{}b", .{ lfield.name, stolePadding * 8 }),
                        ));
                        offset -= stolePadding;
                    }
                }

                leftBitWidth += @bitSizeOf(BackingInt);
            }

            leftBitWidth -= @bitSizeOf(field.type);
            finals.appendAssumeCapacity(.{
                .alignment = 0,
                .default_value = &std.mem.zeroes(field.type),
                .is_comptime = false,
                .name = field.name,
                .type = field.type,
            });
            lastField = &finals.slice()[finals.len - 1];
        } else {
            // Regular (non-bitfield) member; pointers are laid out as usize
            // to avoid a dependency loop on the pointee type.
            const LayoutAs = if (field.is_pointer) usize else field.type;

            if (leftBitWidth > 0) {
                // Close the currently open bitfield storage unit.
                finals.appendAssumeCapacity(makePaddingField(leftBitWidth, padFieldCount));
                lastField = &finals.slice()[finals.len - 1];
                padFieldCount += 1;
                offset += leftBitWidth;
            }
            leftBitWidth = 0;

            if (offset % @alignOf(LayoutAs) != 0) {
                // Align the member to its natural alignment.
                const padding = (@divTrunc(offset, @alignOf(LayoutAs)) + 1) * @alignOf(LayoutAs) - offset;
                offset += padding;

                finals.appendAssumeCapacity(makePaddingField(padding * 8, padFieldCount));
                lastField = &finals.slice()[finals.len - 1];
                padFieldCount += 1;
            } else if (isPaddingField(lastField) and cfg.collapse_padding) {
                // Maybe we need to steal padding
                const lfield = lastField.?;
                const mlp = @divTrunc(@bitSizeOf(LayoutAs), @alignOf(LayoutAs) * 8);
                if (mlp >= 1) {
                    const stolePadding = @alignOf(LayoutAs) * mlp;
                    const nsize = @bitSizeOf(lfield.type) - (stolePadding * 8);
                    finals.set(finals.len - 1, makePaddingFieldWithName(
                        nsize,
                        std.fmt.comptimePrint("{s},-{}b", .{ lfield.name, stolePadding * 8 }),
                    ));
                    offset -= stolePadding;
                }
            }

            finals.appendAssumeCapacity(.{
                .alignment = 0,
                .default_value = if (field.is_pointer) &@as(usize, 0) else &std.mem.zeroes(field.type),
                .is_comptime = false,
                .name = field.name,
                .type = field.type,
            });
            lastField = &finals.slice()[finals.len - 1];
            offset += @bitSizeOf(LayoutAs);
        }
    }

    return @Type(.{
        .@"struct" = .{
            .layout = .@"packed",
            .decls = &.{},
            .fields = finals.constSlice(),
            .is_tuple = false,
            .backing_integer = null,
        },
    });
}
0 commit comments