Zig Version
0.11.0-dev.4308+417b92f08
Steps to Reproduce and Observed Behavior
I'm trying to set up the GDT. I'm using a packed struct with the alignment of u64. However, when I assign to the struct, the statically-assigned entries have all the bits on the first field set (which is correct) but no other field is set (even though I fully initialized the struct). This happens both when using packed unions to represent bitfields and when just using normal integral types.
To reproduce, all that's needed is this code:
const std = @import("std");
const debug = std.debug;
const GdtEntry = packed struct {
limit_lo: u16 = 0,
base_lo: u24 = 0,
access: u8 = 0x00,
limit_hi: u4 = 0,
flags: u4 = 0x0,
base_hi: u8 = 0,
pub fn asU64(self: GdtEntry) u64 {
return @bitCast(self);
}
};
comptime {
if (@sizeOf(GdtEntry) != 8 or @bitSizeOf(GdtEntry) != 64) @compileError("GdtEntry must be 8 bytes!");
if (@bitOffsetOf(GdtEntry, "limit_lo") != 0) @compileError("Limit lo must be at bit offset 0!");
if (@bitOffsetOf(GdtEntry, "base_lo") == 15) @compileError("base_lo must be at bit offset 16!");
if (@bitOffsetOf(GdtEntry, "access") != 40) @compileError("access byte must be at bit offset 40!");
if (@bitOffsetOf(GdtEntry, "limit_hi") != 48) @compileError("limit_hi must be at bit offset 48!");
if (@bitOffsetOf(GdtEntry, "flags") != 52) @compileError("flags must be a bit offset 52!");
if (@bitOffsetOf(GdtEntry, "base_hi") != 56) @compileError("base_hi must be at bit offset 56!");
}
var gdt = [_]GdtEntry{.{}} ** 8;
var ist1 = [_]u8{0} ** 2048;
var ist2 = [_]u8{0} ** 2048;
var ist3 = [_]u8{0} ** 2048;
var ist4 = [_]u8{0} ** 2048;
var ist5 = [_]u8{0} ** 2048;
var ist6 = [_]u8{0} ** 2048;
var ist7 = [_]u8{0} ** 2048;
var stack0 = [_]u8{0} ** 4096;
var stack1 = [_]u8{0} ** 4096;
var stack2 = [_]u8{0} ** 4096;
const TssDescriptor = packed struct {
reserved1: u32 = 0,
rsp0: u64,
rsp1: u64,
rsp2: u64,
reserved2: u64 = 0,
ist1: u64,
ist2: u64,
ist3: u64,
ist4: u64,
ist5: u64,
ist6: u64,
ist7: u64,
reserved3: u32 = 0,
reserved4: u32 = 0,
reserved5: u8 = 0,
iopb: u16,
};
comptime {
if (@sizeOf(TssDescriptor) != 104) @compileError("TSS descriptor must be 104 bytes in size");
}
var tss: TssDescriptor = undefined;
pub fn main() void {
// Initial GDT
// 64-bit kernel code
gdt[1] = .{
.limit_lo = 0xFFFF,
.limit_hi = 0xF,
.access = 0x9B,
.flags = 0xA,
};
// 64-bit kernel data
gdt[2] = .{
.limit_lo = 0xFFFF,
.limit_hi = 0xF,
.access = 0x93,
.flags = 0xC,
};
// 64-bit user code
gdt[3] = .{
.limit_lo = 0xFFFF,
.limit_hi = 0xF,
.access = 0xFB,
.flags = 0xA,
};
// 64-bit user data
gdt[4] = .{
.limit_lo = 0xFFFF,
.limit_hi = 0xF,
.access = 0xF3,
.flags = 0xC,
};
tss = TssDescriptor{
.rsp0 = @intFromPtr(&stack0),
.rsp1 = @intFromPtr(&stack1),
.rsp2 = @intFromPtr(&stack2),
.ist1 = @intFromPtr(&ist1),
.ist2 = @intFromPtr(&ist2),
.ist3 = @intFromPtr(&ist3),
.ist4 = @intFromPtr(&ist4),
.ist5 = @intFromPtr(&ist5),
.ist6 = @intFromPtr(&ist6),
.ist7 = @intFromPtr(&ist7),
.iopb = @sizeOf(TssDescriptor),
};
const tss_base = @intFromPtr(&tss);
const tss_limit = @sizeOf(TssDescriptor) - 1;
gdt[5] = .{
.base_lo = @truncate(tss_base),
.base_hi = @truncate(tss_base >> 24),
.limit_lo = @truncate(tss_limit),
.limit_hi = @truncate(tss_limit >> 16),
.access = 0x89,
};
gdt[6] = .{
.base_lo = @truncate(tss_base >> 32),
.base_hi = @truncate(tss_base >> 56),
};
for (gdt, 0..) |entry, i| {
std.debug.print("GDT entry {}: {X:0<16}\n", .{ i, entry.asU64() });
}
}
When run, this program prints the following output:
GDT entry 0: 0000000000000000
GDT entry 1: FFFF000000000000
GDT entry 2: FFFF000000000000
GDT entry 3: FFFF000000000000
GDT entry 4: FFFF000000000000
GDT entry 5: 7B0E894800000067
GDT entry 6: F700000000000000
GDT entry 7: 0000000000000000
The zeroth and seventh entries are correct (and I strongly believe the fifth and sixth are also correct).
Expected Behavior
The assignments should work fine, without any problems, and should reflect the properly assigned values.
Zig Version
0.11.0-dev.4308+417b92f08
Steps to Reproduce and Observed Behavior
I'm trying to set up the GDT. I'm using a packed struct with the alignment of u64. However, when I assign to the struct, the statically-assigned entries have all the bits on the first field set (which is correct) but no other field is set (even though I fully initialized the struct). This happens both when using packed unions to represent bitfields and when just using normal integral types.
To reproduce, all that's needed is this code:
When run, this program prints the following output:
The zeroth and seventh entries are correct (and I strongly believe the fifth and sixth are also correct).
Expected Behavior
The assignments should work fine, without any problems, and should reflect the properly assigned values.