Skip to content

Commit 52caf31

Browse files
authored
Merge pull request #2286 from shritesh/wasm_alloc
WasmAllocator: WebAssembly Memory Allocator
2 parents 71bb8cd + 0c28a18 commit 52caf31

File tree

1 file changed

+91
-0
lines changed

1 file changed

+91
-0
lines changed

std/heap.zig

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -327,6 +327,97 @@ pub const FixedBufferAllocator = struct {
327327
}
328328
};
329329

330+
// FIXME: Exposed LLVM intrinsics is a bug
// See: https://github.com/ziglang/zig/issues/2291
// Queries the current size of wasm linear memory (index 0), in pages.
extern fn @"llvm.wasm.memory.size.i32"(u32) u32;
// Grows wasm linear memory (index 0) by the given number of pages.
// Returns the previous page count, or -1 if the grow request failed.
extern fn @"llvm.wasm.memory.grow.i32"(u32, u32) i32;
334+
335+
/// Global singleton allocator backed by WebAssembly linear memory.
/// Only meaningful when compiling for a wasm target.
pub const wasm_allocator = &wasm_allocator_state.allocator;
var wasm_allocator_state = WasmAllocator{
    .allocator = Allocator{
        .reallocFn = WasmAllocator.realloc,
        .shrinkFn = WasmAllocator.shrink,
    },
    // start_ptr is initialized lazily on the first realloc call (the current
    // memory size is only known at runtime); num_pages == 0 marks "not yet
    // initialized".
    .start_ptr = undefined,
    .num_pages = 0,
    .end_index = 0,
};
345+
346+
/// A simple bump allocator over WebAssembly linear memory.
///
/// Memory is claimed from the wasm runtime in whole pages (`os.page_size`
/// bytes each, per the computations below) via the `memory.grow` intrinsic
/// and handed out linearly from `start_ptr`. Individual frees never return
/// memory to the runtime; only the most recent allocation can be resized
/// in place (see `is_last_item`).
const WasmAllocator = struct {
    allocator: Allocator,
    // Base address of the region this allocator manages; set on first use.
    start_ptr: [*]u8,
    // Number of wasm pages grown so far; 0 means "uninitialized".
    num_pages: usize,
    // Offset from start_ptr of the first unused byte (the bump pointer).
    end_index: usize,

    /// Bump-allocate `size` bytes aligned to `alignment`, growing linear
    /// memory as needed. Returns error.OutOfMemory if the runtime refuses
    /// to grow.
    fn alloc(allocator: *Allocator, size: usize, alignment: u29) ![]u8 {
        const self = @fieldParentPtr(WasmAllocator, "allocator", allocator);

        // Round the current bump pointer up to the requested alignment.
        const addr = @ptrToInt(self.start_ptr) + self.end_index;
        const adjusted_addr = mem.alignForward(addr, alignment);
        const adjusted_index = self.end_index + (adjusted_addr - addr);
        const new_end_index = adjusted_index + size;

        // Grow linear memory if the allocation extends past the pages we own.
        if (new_end_index > self.num_pages * os.page_size) {
            const required_memory = new_end_index - (self.num_pages * os.page_size);

            // Number of whole pages needed, rounding up.
            var num_pages: u32 = required_memory / os.page_size;
            if (required_memory % os.page_size != 0) {
                num_pages += 1;
            }

            const prev_page = @"llvm.wasm.memory.grow.i32"(0, num_pages);
            if (prev_page == -1) {
                // The wasm runtime refused to grow memory.
                return error.OutOfMemory;
            }

            self.num_pages += num_pages;
        }

        const result = self.start_ptr[adjusted_index..new_end_index];
        self.end_index = new_end_index;

        return result;
    }

    // Check if memory is the last "item" and is aligned correctly
    fn is_last_item(allocator: *Allocator, memory: []u8, alignment: u29) bool {
        const self = @fieldParentPtr(WasmAllocator, "allocator", allocator);
        return memory.ptr == self.start_ptr + self.end_index - memory.len and mem.alignForward(@ptrToInt(memory.ptr), alignment) == @ptrToInt(memory.ptr);
    }

    /// Reallocation callback for the (pre-0.5) `Allocator` vtable.
    /// Resizes in place when `old_mem` is the most recent allocation;
    /// otherwise falls back to allocate-and-copy.
    fn realloc(allocator: *Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) ![]u8 {
        const self = @fieldParentPtr(WasmAllocator, "allocator", allocator);

        // Initialize start_ptr at the first realloc
        // (num_pages == 0 means this allocator has never been used; the base
        // of our region is the current end of linear memory).
        if (self.num_pages == 0) {
            self.start_ptr = @intToPtr([*]u8, @intCast(usize, @"llvm.wasm.memory.size.i32"(0)) * os.page_size);
        }

        if (is_last_item(allocator, old_mem, new_align)) {
            // old_mem is the tail allocation: grow or shrink it in place by
            // moving the bump pointer.
            const start_index = self.end_index - old_mem.len;
            const new_end_index = start_index + new_size;

            if (new_end_index > self.num_pages * os.page_size) {
                // Reuse alloc() purely to grow linear memory far enough; the
                // returned slice is discarded and end_index is overwritten below.
                _ = try alloc(allocator, new_end_index - self.end_index, new_align);
            }
            const result = self.start_ptr[start_index..new_end_index];

            self.end_index = new_end_index;
            return result;
        } else if (new_size <= old_mem.len and new_align <= old_align) {
            // NOTE(review): a non-tail shrink cannot reclaim anything in a bump
            // allocator; returning OutOfMemory presumably signals the Allocator
            // interface to keep the existing memory — confirm against the
            // reallocFn contract of this std version.
            return error.OutOfMemory;
        } else {
            // Non-tail grow (or stricter alignment): allocate fresh space and
            // copy; the old region is leaked by design.
            const result = try alloc(allocator, new_size, new_align);
            mem.copy(u8, result, old_mem);
            return result;
        }
    }

    /// Shrink callback: a bump allocator never reclaims, so just narrow the
    /// slice (alignment arguments are irrelevant here and ignored).
    fn shrink(allocator: *Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) []u8 {
        return old_mem[0..new_size];
    }
};
420+
330421
pub const ThreadSafeFixedBufferAllocator = blk: {
331422
if (builtin.single_threaded) {
332423
break :blk FixedBufferAllocator;

0 commit comments

Comments
 (0)