
Commit 1dc3025

translate-c: initial support for bitfields
1 parent 242d268 commit 1dc3025

16 files changed, +823 −60 lines changed

lib/compiler/aro_translate_c/ast.zig

Lines changed: 38 additions & 11 deletions
@@ -227,6 +227,9 @@ pub const Node = extern union {
         /// [1]type{val} ** count
         array_filler,
 
+        /// @import("std").zig.c_translation.EmulateBitfieldStruct(S)
+        helpers_emulate_bitfield_struct,
+
         pub const last_no_payload_tag = Tag.@"break";
         pub const no_payload_count = @intFromEnum(last_no_payload_tag) + 1;

@@ -376,6 +379,7 @@
             .shuffle => Payload.Shuffle,
             .builtin_extern => Payload.Extern,
             .macro_arithmetic => Payload.MacroArithmetic,
+            .helpers_emulate_bitfield_struct => Payload.EmulateBitfieldStruct,
         };
     }

@@ -698,6 +702,14 @@ pub const Payload = struct {
         },
     };
 
+    pub const EmulateBitfieldStruct = struct {
+        base: Payload,
+        data: struct {
+            definition: Node,
+            cfg: Node,
+        },
+    };
+
     pub const StringSlice = struct {
         base: Payload,
         data: struct {
@@ -917,6 +929,11 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
             const import_node = try renderStdImport(c, &.{ "zig", "c_translation", "shuffleVectorIndex" });
             return renderCall(c, import_node, &.{ payload.lhs, payload.rhs });
         },
+        .helpers_emulate_bitfield_struct => {
+            const payload = node.castTag(.helpers_emulate_bitfield_struct).?.data;
+            const import_node = try renderStdImport(c, &.{ "zig", "c_translation", "EmulateBitfieldStruct" });
+            return renderCall(c, import_node, &.{ payload.definition, payload.cfg });
+        },
         .vector => {
             const payload = node.castTag(.vector).?.data;
             return renderBuiltinCall(c, "@Vector", &.{ payload.lhs, payload.rhs });
@@ -2042,25 +2059,34 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
             }
             _ = try c.addToken(.r_brace, "}");
 
-            if (payload.len < 3) {
-                return c.addNode(.{
-                    .tag = .struct_init_dot_two_comma,
+            switch (payload.len) {
+                0 => return c.addNode(.{
+                    .tag = .struct_init_dot_two, // the inits[0], inits[1] are both 0
                     .main_token = l_brace,
                     .data = .{
                         .lhs = inits[0],
                         .rhs = inits[1],
                     },
-                });
-            } else {
-                const span = try c.listToSpan(inits);
-                return c.addNode(.{
-                    .tag = .struct_init_dot_comma,
+                }),
+                1, 2 => return c.addNode(.{
+                    .tag = .struct_init_dot_two_comma,
                     .main_token = l_brace,
                     .data = .{
-                        .lhs = span.start,
-                        .rhs = span.end,
+                        .lhs = inits[0],
+                        .rhs = inits[1],
                     },
-                });
+                }),
+                else => {
+                    const span = try c.listToSpan(inits);
+                    return c.addNode(.{
+                        .tag = .struct_init_dot_comma,
+                        .main_token = l_brace,
+                        .data = .{
+                            .lhs = span.start,
+                            .rhs = span.end,
+                        },
+                    });
+                },
             }
         },
         .container_init => {
@@ -2387,6 +2413,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
         .helpers_promoteIntLiteral,
         .helpers_shuffle_vector_index,
         .helpers_flexible_array_type,
+        .helpers_emulate_bitfield_struct,
         .std_mem_zeroinit,
         .integer_literal,
         .float_literal,
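
Taken together, the new tag, payload, and render cases mean a `helpers_emulate_bitfield_struct` node is rendered as a call into the std helper, with the field list and configuration literal built elsewhere by the translator. A rough sketch of the rendered shape (the field names and types here are illustrative assumptions, not output from this commit):

// Sketch only: the actual arguments are constructed by translate-c.
const S = @import("std").zig.c_translation.EmulateBitfieldStruct(&.{
    .{ .name = "x", .type = u1, .backing_integer = c_uint },
}, .{});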

lib/std/zig/c_translation.zig

Lines changed: 221 additions & 0 deletions
@@ -670,3 +670,224 @@ test "Extended C ABI casting" {
         try testing.expect(@TypeOf(Macros.L_SUFFIX(math.maxInt(c_long) + 1)) == c_longlong); // comptime_int -> c_longlong
     }
 }
+
+const BitfieldEmulation = struct {
+    /// By default, bits are allocated from LSB to MSB
+    /// (matching Zig's packed structs and most ABIs).
+    /// Set to true to allocate from MSB to LSB instead.
+    reverse_bits: bool,
+    /// Most ABIs start a new storage unit after an unnamed zero-width bit field.
+    /// Set to false for ABIs that ignore such fields.
+    unnamed_void_boundary: bool,
+    /// Some ABIs allow a bit field to straddle storage units.
+    /// Set to false for ABIs, such as MSVC's, that never straddle.
+    straddle: bool,
+    /// Also called 'stealing padding'.
+    /// Allows the next field to reuse space from the previous field's padding.
+    collapse_padding: bool,
+
+    fn fromTarget(target: std.Target) ?BitfieldEmulation {
+        return switch (target.cpu.arch) {
+            .x86_64, .x86 => .{
+                .reverse_bits = false,
+                .unnamed_void_boundary = true,
+                .straddle = false,
+                .collapse_padding = switch (target.os.tag) {
+                    .windows => false,
+                    else => true,
+                },
+            },
+            .aarch64 => .{
+                .reverse_bits = false,
+                .unnamed_void_boundary = true,
+                .straddle = false,
+                .collapse_padding = true,
+            },
+            else => null,
+        };
+    }
+
+    fn merge(base: BitfieldEmulation, apply: anytype) BitfieldEmulation {
+        var copy = base;
+        for (std.meta.fieldNames(@This())) |name| {
+            if (@hasField(@TypeOf(apply), name)) {
+                @field(copy, name) = @field(apply, name);
+            }
+        }
+        return copy;
+    }
+};
+
+pub const Bitfield = struct {
+    /// The field name.
+    name: [:0]const u8,
+    /// The actual type of the field. For a bit field,
+    /// it's the bit-sized unsigned integer.
+    ///
+    /// For the C field `unsigned field0: 1`, the type is `u1`.
+    type: type,
+    /// The backing integer for this field.
+    ///
+    /// For the C field `unsigned field0: 1`, the backing integer is `c_uint`.
+    backing_integer: ?type = null,
+    /// If the field is a pointer, it is laid out as `usize` and its type is never inspected.
+    ///
+    /// This helps to avoid dependency loops.
+    is_pointer: bool = false,
+};
+
+fn makePaddingField(comptime bitsize: comptime_int, fieldNameCount: comptime_int) std.builtin.Type.StructField {
+    return makePaddingFieldWithName(bitsize, std.fmt.comptimePrint(" pad_{},+{}b", .{ fieldNameCount, bitsize }));
+}
+
+fn makePaddingFieldWithName(comptime bitsize: comptime_int, fieldName: [:0]const u8) std.builtin.Type.StructField {
+    const T = @Type(.{ .int = .{
+        .signedness = .unsigned,
+        .bits = bitsize,
+    } });
+    return .{
+        .alignment = 0,
+        .type = T,
+        .default_value = &std.mem.zeroes(T),
+        .name = fieldName,
+        .is_comptime = false,
+    };
+}
+
+fn isPaddingField(field: ?*const std.builtin.Type.StructField) bool {
+    return if (field) |f| std.mem.startsWith(u8, f.name, " pad_") else false;
+}
+
+/// Translate a struct definition into a packed struct type that adapts C bitfields
+/// to the target platform.
+///
+/// If the target platform is unsupported, an opaque type is returned.
+///
+/// `fields` is the struct definition.
+/// `modCfg` is a configuration accepted by `BitfieldEmulation.merge`.
+///
+/// Be advised that bit fields have different representable ranges on different ABIs.
+/// This function assumes all bit fields are unsigned.
+pub fn EmulateBitfieldStruct(comptime fields: []const Bitfield, comptime modCfg: anytype) type {
+    const cfg = if (BitfieldEmulation.fromTarget(builtin.target)) |cfg|
+        cfg.merge(modCfg)
+    else {
+        return opaque {};
+    };
+
+    // TODO: implement reverse_bits
+    if (cfg.reverse_bits) @compileError("TODO: reverse_bits is not implemented");
+
+    comptime var finals: std.BoundedArray(std.builtin.Type.StructField, fields.len * 2) = .{};
+    comptime var lastBackingInt: ?type = null;
+    comptime var leftBitWidth = 0;
+    comptime var padFieldCount = 0;
+    comptime var lastField: ?*std.builtin.Type.StructField = null;
+    // The used space in bits
+    comptime var offset = 0;
+
+    for (fields) |field| {
+        if (comptime !field.is_pointer and @typeInfo(field.type) == .@"struct" and @typeInfo(field.type).@"struct".layout == .@"extern") {
+            return opaque {};
+        }
+        if (field.backing_integer) |BackingInt| {
+            const requiredBits = @typeInfo(field.type).int.bits;
+            if (leftBitWidth < requiredBits) {
+                if (!cfg.straddle and (leftBitWidth > 0)) {
+                    // add padding to use a new unit for the next field
+                    finals.appendAssumeCapacity(makePaddingField(leftBitWidth, padFieldCount));
+                    lastField = &finals.slice()[finals.len - 1];
+                    padFieldCount += 1;
+                    leftBitWidth = 0;
+                }
+
+                if (offset % @alignOf(BackingInt) != 0) {
+                    const padding = (@divTrunc(offset, @alignOf(BackingInt)) + 1) * @alignOf(BackingInt) - offset;
+                    offset += padding;
+
+                    finals.appendAssumeCapacity(makePaddingField(padding * 8, padFieldCount));
+                    lastField = &finals.slice()[finals.len - 1];
+                    padFieldCount += 1;
+                } else if (isPaddingField(lastField) and cfg.collapse_padding) {
+                    // Maybe we need to steal padding
+                    const lfield = lastField.?;
+                    const mlp = @divTrunc(@bitSizeOf(lfield.type), @alignOf(BackingInt) * 8);
+                    if (mlp >= 1) {
+                        const stolePadding = @alignOf(BackingInt) * mlp;
+                        const nsize = @bitSizeOf(lfield.type) - (stolePadding * 8);
+                        finals.set(finals.len - 1, makePaddingFieldWithName(
+                            nsize,
+                            std.fmt.comptimePrint("{s},-{}b", .{ lfield.name, stolePadding * 8 }),
+                        ));
+                        offset -= stolePadding;
+                    }
+                }
+
+                lastBackingInt = BackingInt;
+                leftBitWidth += @bitSizeOf(BackingInt);
+            }
+
+            leftBitWidth -= @bitSizeOf(field.type);
+            finals.appendAssumeCapacity(.{
+                .alignment = 0,
+                .default_value = &std.mem.zeroes(field.type),
+                .is_comptime = false,
+                .name = field.name,
+                .type = field.type,
+            });
+            lastField = &finals.slice()[finals.len - 1];
+        } else {
+            const LayoutAs = if (field.is_pointer) usize else field.type;
+
+            if (leftBitWidth > 0) {
+                finals.appendAssumeCapacity(makePaddingField(leftBitWidth, padFieldCount));
+                lastField = &finals.slice()[finals.len - 1];
+                padFieldCount += 1;
+                offset += leftBitWidth;
+            }
+            leftBitWidth = 0;
+            lastBackingInt = null;
+
+            if (offset % @alignOf(LayoutAs) != 0) {
+                const padding = (@divTrunc(offset, @alignOf(LayoutAs)) + 1) * @alignOf(LayoutAs) - offset;
+                offset += padding;
+
+                finals.appendAssumeCapacity(makePaddingField(padding * 8, padFieldCount));
+                lastField = &finals.slice()[finals.len - 1];
+                padFieldCount += 1;
+            } else if (isPaddingField(lastField) and cfg.collapse_padding) {
+                // Maybe we need to steal padding
+                const lfield = lastField.?;
+                const mlp = @divTrunc(@bitSizeOf(LayoutAs), @alignOf(LayoutAs) * 8);
+                if (mlp >= 1) {
+                    const stolePadding = @alignOf(LayoutAs) * mlp;
+                    const nsize = @bitSizeOf(lfield.type) - (stolePadding * 8);
+                    finals.set(finals.len - 1, makePaddingFieldWithName(
+                        nsize,
+                        std.fmt.comptimePrint("{s},-{}b", .{ lfield.name, stolePadding * 8 }),
+                    ));
+                    offset -= stolePadding;
+                }
+            }
+
+            finals.appendAssumeCapacity(.{
+                .alignment = 0,
+                .default_value = if (field.is_pointer) &@as(usize, 0) else &std.mem.zeroes(field.type),
+                .is_comptime = false,
+                .name = field.name,
+                .type = field.type,
+            });
+            lastField = &finals.slice()[finals.len - 1];
+            offset += @bitSizeOf(LayoutAs);
+        }
+    }
+
+    return @Type(.{
+        .@"struct" = .{
+            .layout = .@"packed",
+            .decls = &.{},
+            .fields = finals.constSlice(),
+            .is_tuple = false,
+            .backing_integer = null,
+        },
+    });
+}
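
For a sense of how the helper might be used, here is a hand-written sketch, not a test from this commit; the C struct, the field list, and the expectations are illustrative assumptions and only hold on targets that `BitfieldEmulation.fromTarget` supports:

test "EmulateBitfieldStruct sketch (illustrative)" {
    // Hypothetical input: struct Flags { unsigned a : 3; unsigned b : 5; };
    const Flags = EmulateBitfieldStruct(&.{
        .{ .name = "a", .type = u3, .backing_integer = c_uint },
        .{ .name = "b", .type = u5, .backing_integer = c_uint },
    }, .{});
    // On supported targets the result is a packed struct whose fields
    // can be read and written directly.
    var f = std.mem.zeroes(Flags);
    f.a = 5;
    f.b = 17;
    try testing.expect(f.a == 5 and f.b == 17);
}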

src/clang.zig

Lines changed: 6 additions & 0 deletions
@@ -487,6 +487,12 @@ pub const FieldDecl = opaque {
     pub const isBitField = ZigClangFieldDecl_isBitField;
     extern fn ZigClangFieldDecl_isBitField(*const FieldDecl) bool;
 
+    pub const isUnnamedBitField = ZigClangFieldDecl_isUnnamedBitField;
+    extern fn ZigClangFieldDecl_isUnnamedBitField(*const FieldDecl) bool;
+
+    pub const getBitWidthValue = ZigClangFieldDecl_getBitWidthValue;
+    extern fn ZigClangFieldDecl_getBitWidthValue(*const FieldDecl, *const ASTContext) c_uint;
+
     pub const getType = ZigClangFieldDecl_getType;
     extern fn ZigClangFieldDecl_getType(*const FieldDecl) QualType;
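
These bindings give the translator what it needs to detect bit fields (including unnamed zero-width ones) and read their declared widths. A hypothetical call site, not part of this commit, might look like the following, where `clang` refers to src/clang.zig and `ctx` is the `*const clang.ASTContext` for the translation unit:

// Hypothetical helper: returns the declared bit width of a field,
// or null if the field is not a bit field.
fn declaredBitWidth(field: *const clang.FieldDecl, ctx: *const clang.ASTContext) ?c_uint {
    // isBitField distinguishes `unsigned a : 3;` from a plain `unsigned a;`.
    if (!field.isBitField()) return null;
    // getBitWidthValue evaluates the width expression, e.g. 3 for `unsigned a : 3;`.
    return field.getBitWidthValue(ctx);
}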
