Skip to content

sized deallocation fixes #17012

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 5 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 17 additions & 2 deletions src/liballoc/heap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

use core::ptr::RawPtr;
#[cfg(not(test))] use core::raw;
#[cfg(not(test))] use util;
#[cfg(stage0, not(test))] use util;

/// Returns a pointer to `size` bytes of memory.
///
Expand Down Expand Up @@ -119,7 +119,7 @@ unsafe fn exchange_free(ptr: *mut u8, size: uint, align: uint) {
}

// FIXME: #7496
#[cfg(not(test))]
#[cfg(stage0, not(test))]
#[lang="closure_exchange_malloc"]
#[inline]
#[allow(deprecated)]
Expand All @@ -134,6 +134,21 @@ unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
alloc as *mut u8
}

// FIXME: #7496
// Post-stage0 replacement for the lang item above: the compiler emits calls
// to this when it heap-allocates a closure (`proc`) environment box.
// The caller supplies the total `size` and `align` of the box — presumably
// including the `raw::Box` header, since nothing is added here (the trans-side
// caller computes them from the tuplified box type) — TODO confirm. Recording
// size/align at the call site is what enables sized deallocation later.
#[cfg(not(stage0), not(test))]
#[lang="closure_exchange_malloc"]
#[inline]
#[allow(deprecated)]
unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
align: uint) -> *mut u8 {
// Raw storage for the whole box; no header arithmetic happens here.
let p = allocate(size, align);

// Stamp the drop glue into the box header so the destructor for the
// captured environment can be invoked when the box is freed.
let alloc = p as *mut raw::Box<()>;
(*alloc).drop_glue = drop_glue;

alloc as *mut u8
}

#[cfg(jemalloc)]
mod imp {
use core::option::{None, Option};
Expand Down
97 changes: 50 additions & 47 deletions src/libarena/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,17 +39,18 @@ use std::mem;
use std::num;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::allocate;
use std::rt::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[deriving(Clone, PartialEq)]
struct Chunk {
data: Rc<RefCell<Vec<u8> >>,
data: Rc<RefCell<Vec<u8>>>,
fill: Cell<uint>,
is_copy: Cell<bool>,
}

impl Chunk {
fn capacity(&self) -> uint {
self.data.borrow().capacity()
Expand Down Expand Up @@ -357,38 +358,37 @@ pub struct TypedArena<T> {
end: Cell<*const T>,

/// A pointer to the first arena segment.
first: RefCell<TypedArenaChunkRef<T>>,
first: RefCell<*mut TypedArenaChunk<T>>,
}
type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;

struct TypedArenaChunk<T> {
/// Pointer to the next arena segment.
next: TypedArenaChunkRef<T>,
next: *mut TypedArenaChunk<T>,

/// The number of elements that this chunk can hold.
capacity: uint,

// Objects follow here, suitably aligned.
}

fn calculate_size<T>(capacity: uint) -> uint {
let mut size = mem::size_of::<TypedArenaChunk<T>>();
size = round_up(size, mem::min_align_of::<T>());
let elem_size = mem::size_of::<T>();
let elems_size = elem_size.checked_mul(&capacity).unwrap();
size = size.checked_add(&elems_size).unwrap();
size
}

impl<T> TypedArenaChunk<T> {
#[inline]
fn new(next: Option<Box<TypedArenaChunk<T>>>, capacity: uint)
-> Box<TypedArenaChunk<T>> {
let mut size = mem::size_of::<TypedArenaChunk<T>>();
size = round_up(size, mem::min_align_of::<T>());
let elem_size = mem::size_of::<T>();
let elems_size = elem_size.checked_mul(&capacity).unwrap();
size = size.checked_add(&elems_size).unwrap();

let mut chunk = unsafe {
let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>());
let mut chunk: Box<TypedArenaChunk<T>> = mem::transmute(chunk);
ptr::write(&mut chunk.next, next);
chunk
};

chunk.capacity = capacity;
// Allocates a new chunk able to hold `capacity` elements of `T` and links
// it in front of `next` (null for the first chunk). The returned pointer
// is owned by the arena; `destroy` deallocates it with the same size
// computed by `calculate_size`, so the two must stay in sync.
// NOTE(review): the allocation is aligned only to the chunk header's
// alignment, while `calculate_size` rounds the element offset up to `T`'s
// alignment — if `T` is more strictly aligned than the header, the element
// area's alignment depends on the allocator over-aligning; confirm.
unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: uint)
-> *mut TypedArenaChunk<T> {
let size = calculate_size::<T>(capacity);
let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
as *mut TypedArenaChunk<T>;
(*chunk).next = next;
(*chunk).capacity = capacity;
chunk
}

Expand All @@ -406,14 +406,13 @@ impl<T> TypedArenaChunk<T> {
}

// Destroy the next chunk.
let next_opt = mem::replace(&mut self.next, None);
match next_opt {
None => {}
Some(mut next) => {
// We assume that the next chunk is completely filled.
let capacity = next.capacity;
next.destroy(capacity)
}
let next = self.next;
let size = calculate_size::<T>(self.capacity);
deallocate(self as *mut TypedArenaChunk<T> as *mut u8, size,
mem::min_align_of::<TypedArenaChunk<T>>());
if next.is_not_null() {
let capacity = (*next).capacity;
(*next).destroy(capacity);
}
}

Expand Down Expand Up @@ -448,11 +447,13 @@ impl<T> TypedArena<T> {
/// objects.
#[inline]
pub fn with_capacity(capacity: uint) -> TypedArena<T> {
let chunk = TypedArenaChunk::<T>::new(None, capacity);
TypedArena {
ptr: Cell::new(chunk.start() as *const T),
end: Cell::new(chunk.end() as *const T),
first: RefCell::new(Some(chunk)),
unsafe {
let chunk = TypedArenaChunk::<T>::new(ptr::mut_null(), capacity);
TypedArena {
ptr: Cell::new((*chunk).start() as *const T),
end: Cell::new((*chunk).end() as *const T),
first: RefCell::new(chunk),
}
}
}

Expand All @@ -476,26 +477,28 @@ impl<T> TypedArena<T> {
/// Grows the arena.
#[inline(never)]
fn grow(&self) {
let chunk = self.first.borrow_mut().take().unwrap();
let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
self.ptr.set(chunk.start() as *const T);
self.end.set(chunk.end() as *const T);
*self.first.borrow_mut() = Some(chunk)
unsafe {
let chunk = *self.first.borrow_mut();
let new_capacity = (*chunk).capacity.checked_mul(&2).unwrap();
let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
self.ptr.set((*chunk).start() as *const T);
self.end.set((*chunk).end() as *const T);
*self.first.borrow_mut() = chunk
}
}
}

#[unsafe_destructor]
impl<T> Drop for TypedArena<T> {
fn drop(&mut self) {
// Determine how much was filled.
let start = self.first.borrow().as_ref().unwrap().start() as uint;
let end = self.ptr.get() as uint;
let diff = (end - start) / mem::size_of::<T>();

// Pass that to the `destroy` method.
unsafe {
self.first.borrow_mut().as_mut().unwrap().destroy(diff)
// Determine how much was filled.
let start = self.first.borrow().as_ref().unwrap().start() as uint;
let end = self.ptr.get() as uint;
let diff = (end - start) / mem::size_of::<T>();

// Pass that to the `destroy` method.
(**self.first.borrow_mut()).destroy(diff)
}
}
}
Expand Down
35 changes: 33 additions & 2 deletions src/librustc/middle/trans/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ use middle::trans::glue;
use middle::trans::inline;
use middle::trans::intrinsic;
use middle::trans::machine;
use middle::trans::machine::{llsize_of, llsize_of_real};
use middle::trans::machine::{llsize_of, llsize_of_real, llalign_of_min};
use middle::trans::meth;
use middle::trans::monomorphize;
use middle::trans::tvec;
Expand Down Expand Up @@ -382,13 +382,44 @@ pub fn malloc_raw_dyn<'a>(bcx: &'a Block<'a>,
Result::new(r.bcx, PointerCast(r.bcx, r.val, llty_ptr))
}

/// Allocates the box for a `proc` (owned closure) with contents of type `t`
/// by calling the `alloc_fn` lang item (e.g. `closure_exchange_malloc`).
/// Passes the drop glue for `~t` plus the static size and minimum alignment
/// of `t`, so the matching free can be a sized deallocation. Returns the
/// continuation block and the allocation pointer cast to the LLVM type
/// of `~t`.
pub fn malloc_raw_dyn_proc<'a>(
                   bcx: &'a Block<'a>,
                   t: ty::t, alloc_fn: LangItem) -> Result<'a> {
    let _icx = push_ctxt("malloc_raw_dyn_proc");
    let ccx = bcx.ccx();

    let langcall = require_alloc_fn(bcx, t, alloc_fn);

    // Grab the TypeRef type of ptr_ty (`~t`), the pointer the caller gets.
    let ptr_ty = ty::mk_uniq(bcx.tcx(), t);
    let ptr_llty = type_of(ccx, ptr_ty);

    // Static size and alignment of the box contents, handed to the
    // allocator so it can be paired with a sized free.
    let llty = type_of(ccx, t);
    let size = llsize_of(ccx, llty);
    let llalign = C_uint(ccx, llalign_of_min(ccx, llty) as uint);

    // Allocate space, registering the drop glue for the boxed value:
    let drop_glue = glue::get_drop_glue(ccx, ptr_ty);
    let r = callee::trans_lang_call(
        bcx,
        langcall,
        [
            PointerCast(bcx, drop_glue, Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to()),
            size,
            llalign
        ],
        None);
    Result::new(r.bcx, PointerCast(r.bcx, r.val, ptr_llty))
}


pub fn malloc_raw_dyn_managed<'a>(
bcx: &'a Block<'a>,
t: ty::t,
alloc_fn: LangItem,
size: ValueRef)
-> Result<'a> {
let _icx = push_ctxt("malloc_raw_managed");
let _icx = push_ctxt("malloc_raw_dyn_managed");
let ccx = bcx.ccx();

let langcall = require_alloc_fn(bcx, t, alloc_fn);
Expand Down
55 changes: 55 additions & 0 deletions src/librustc/middle/trans/cleanup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -340,6 +340,27 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
self.schedule_clean(cleanup_scope, drop as CleanupObj);
}

fn schedule_free_slice(&self,
cleanup_scope: ScopeId,
val: ValueRef,
size: ValueRef,
align: ValueRef,
heap: Heap) {
/*!
 * Schedules a free of the slice allocation `val` — `size` bytes at
 * `align` alignment — when `cleanup_scope` exits. Note that this is
 * a shallow operation: only the memory is released; the slice's
 * elements are not dropped.
 */

// `size`/`align` travel with the cleanup so the exchange-heap path can
// perform a sized deallocation (see `FreeSlice::trans`).
let drop = box FreeSlice { ptr: val, size: size, align: align, heap: heap };

debug!("schedule_free_slice({:?}, val={}, heap={:?})",
cleanup_scope,
self.ccx.tn().val_to_string(val),
heap);

self.schedule_clean(cleanup_scope, drop as CleanupObj);
}

fn schedule_clean(&self,
cleanup_scope: ScopeId,
cleanup: CleanupObj) {
Expand Down Expand Up @@ -926,6 +947,34 @@ impl Cleanup for FreeValue {
}
}

/// Cleanup that releases a slice allocation without dropping its contents.
/// Scheduled by `schedule_free_slice`; the stored size/alignment allow a
/// sized deallocation on the exchange heap.
pub struct FreeSlice {
// Pointer to the allocation to free.
ptr: ValueRef,
// Total size in bytes of the allocation.
size: ValueRef,
// Alignment the allocation was made with.
align: ValueRef,
// Which heap the pointer came from; selects the free path in `trans`.
heap: Heap,
}

impl Cleanup for FreeSlice {
// Freeing must still happen if the function unwinds...
fn must_unwind(&self) -> bool {
true
}

// ...and this cleanup runs on the unwind path as well as normal exit.
fn clean_on_unwind(&self) -> bool {
true
}

fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
match self.heap {
// Managed (@) heap: plain free; size/align are not needed.
HeapManaged => {
glue::trans_free(bcx, self.ptr)
}
// Exchange (~) heap: sized deallocation using the recorded
// size and alignment.
HeapExchange => {
glue::trans_exchange_free_dyn(bcx, self.ptr, self.size, self.align)
}
}
}
}

// Cleanup holding a pointer whose lifetime is being ended.
// NOTE(review): its `Cleanup` impl is outside this view — name suggests it
// emits an end-of-lifetime marker for `ptr`; confirm against the impl.
pub struct LifetimeEnd {
ptr: ValueRef,
}
Expand Down Expand Up @@ -1020,6 +1069,12 @@ pub trait CleanupMethods<'a> {
val: ValueRef,
heap: Heap,
content_ty: ty::t);
fn schedule_free_slice(&self,
cleanup_scope: ScopeId,
val: ValueRef,
size: ValueRef,
align: ValueRef,
heap: Heap);
fn schedule_clean(&self,
cleanup_scope: ScopeId,
cleanup: CleanupObj);
Expand Down
8 changes: 2 additions & 6 deletions src/librustc/middle/trans/closure.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ use middle::trans::common::*;
use middle::trans::datum::{Datum, DatumBlock, Expr, Lvalue, rvalue_scratch_datum};
use middle::trans::debuginfo;
use middle::trans::expr;
use middle::trans::machine::llsize_of;
use middle::trans::type_of::*;
use middle::trans::type_::Type;
use middle::ty;
Expand Down Expand Up @@ -144,15 +143,12 @@ fn allocate_cbox<'a>(bcx: &'a Block<'a>,
let tcx = bcx.tcx();

// Allocate and initialize the box:
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
match store {
ty::UniqTraitStore => {
let ty = type_of(bcx.ccx(), cdata_ty);
let size = llsize_of(bcx.ccx(), ty);
// we treat proc as @ here, which isn't ideal
malloc_raw_dyn_managed(bcx, cdata_ty, ClosureExchangeMallocFnLangItem, size)
malloc_raw_dyn_proc(bcx, cbox_ty, ClosureExchangeMallocFnLangItem)
}
ty::RegionTraitStore(..) => {
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
let llbox = alloc_ty(bcx, cbox_ty, "__closure");
Result::new(bcx, llbox)
}
Expand Down
Loading