diff --git a/.gitignore b/.gitignore index f9cd0f4f764ab..0853e56e3cbeb 100644 --- a/.gitignore +++ b/.gitignore @@ -78,3 +78,4 @@ x86_64-apple-darwin/ doc/core/ tmp.*.rs config.stamp +.DS_Store diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index 94efae2442f8a..2bfde838bce43 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -9,11 +9,9 @@ import syntax::diagnostic; import result::{ok, err}; import io::writer_util; -import result; import std::{map, json, tempfile, term, sort, getopts}; import map::hashmap; -import str; -import vec; +import json::to_str; import getopts::{optflag, optopt, opt_present}; type package = { @@ -399,28 +397,28 @@ fn parse_source(name: str, j: json::json) -> source { } alt j { - json::dict(_j) { - let mut url = alt _j.find("url") { + json::dict(j) { + let mut url = alt j.find("url") { some(json::string(u)) { - u + *u } _ { fail "needed 'url' field in source"; } }; - let method = alt _j.find("method") { + let method = alt j.find("method") { some(json::string(u)) { - u + *u } _ { assume_source_method(url) } }; - let key = alt _j.find("key") { + let key = alt j.find("key") { some(json::string(u)) { - some(u) + some(*u) } _ { none } }; - let keyfp = alt _j.find("keyfp") { + let keyfp = alt j.find("keyfp") { some(json::string(u)) { - some(u) + some(*u) } _ { none } }; @@ -450,20 +448,20 @@ fn try_parse_sources(filename: str, sources: map::hashmap) { } } ok(_) { fail "malformed sources.json"; } - err(e) { fail #fmt("%s:%u:%u: %s", filename, e.line, e.col, e.msg); } + err(e) { fail #fmt("%s:%s", filename, e.to_str()); } } } fn load_one_source_package(src: source, p: map::hashmap) { let name = alt p.find("name") { - some(json::string(_n)) { - if !valid_pkg_name(_n) { - warn("malformed source json: " + src.name + ", '" + _n + "'"+ + some(json::string(n)) { + if !valid_pkg_name(*n) { + warn("malformed source json: " + src.name + ", '" + *n + "'"+ " is an invalid name (alphanumeric, underscores and" + " dashes only)"); ret; } - _n + *n } _ { warn("malformed source json: " + src.name + " (missing name)"); @@ -472,13 +470,13 @@ fn load_one_source_package(src: source, p: map::hashmap) { }; let uuid = alt p.find("uuid") { - some(json::string(_n)) { - if !is_uuid(_n) { - warn("malformed source json: " + src.name + ", '" + _n + "'"+ + some(json::string(n)) { + if !is_uuid(*n) { + warn("malformed source json: " + src.name + ", '" + *n + "'"+ " is an invalid uuid"); ret; } - _n + *n } _ { warn("malformed source json: " + src.name + " (missing uuid)"); @@ -487,7 +485,7 @@ fn load_one_source_package(src: source, p: map::hashmap) { }; let url = alt p.find("url") { - some(json::string(_n)) { _n } + some(json::string(n)) { *n } _ { warn("malformed source json: " + src.name + " (missing url)"); ret; @@ -495,7 +493,7 @@ fn load_one_source_package(src: source, p: map::hashmap) { }; let method = alt p.find("method") { - some(json::string(_n)) { _n } + some(json::string(n)) { *n } _ { warn("malformed source json: " + src.name + " (missing method)"); ret; @@ -503,16 +501,16 @@ fn load_one_source_package(src: source, p: map::hashmap) { }; let ref = alt p.find("ref") { - some(json::string(_n)) { some(_n) } + some(json::string(n)) { some(*n) } _ { none } }; let mut tags = []; alt p.find("tags") { some(json::list(js)) { - for js.each {|j| + for (*js).each {|j| alt j { - json::string(_j) { vec::grow(tags, 1u, _j); } + json::string(j) { vec::grow(tags, 1u, *j); } _ { } } } @@ -521,7 +519,7 @@ fn load_one_source_package(src: source, p: map::hashmap) { } let description = alt 
p.find("description") { - some(json::string(_n)) { _n } + some(json::string(n)) { *n } _ { warn("malformed source json: " + src.name + " (missing description)"); @@ -570,7 +568,7 @@ fn load_source_info(c: cargo, src: source) { "(source info is not a dict)"); } err(e) { - warn(#fmt("%s:%u:%u: %s", src.name, e.line, e.col, e.msg)); + warn(#fmt("%s:%s", src.name, e.to_str())); } }; } @@ -582,10 +580,10 @@ fn load_source_packages(c: cargo, src: source) { let pkgstr = io::read_whole_file_str(pkgfile); alt json::from_str(result::get(pkgstr)) { ok(json::list(js)) { - for js.each {|_j| - alt _j { - json::dict(_p) { - load_one_source_package(src, _p); + for (*js).each {|j| + alt j { + json::dict(p) { + load_one_source_package(src, p); } _ { warn("malformed source json: " + src.name + @@ -599,7 +597,7 @@ fn load_source_packages(c: cargo, src: source) { "(packages is not a list)"); } err(e) { - warn(#fmt("%s:%u:%u: %s", src.name, e.line, e.col, e.msg)); + warn(#fmt("%s:%s", src.name, e.to_str())); } }; } @@ -766,8 +764,8 @@ fn install_one_crate(c: cargo, path: str, cf: str) { fn rustc_sysroot() -> str { alt os::self_exe_path() { - some(_path) { - let path = [_path, "..", "bin", "rustc"]; + some(path) { + let path = [path, "..", "bin", "rustc"]; check vec::is_not_empty(path); let rustc = path::normalize(path::connect_many(path)); #debug(" rustc: %s", rustc); @@ -1578,18 +1576,18 @@ fn dump_sources(c: cargo) { let chash = map::str_hash(); let child = json::dict(chash); - chash.insert("url", json::string(v.url)); - chash.insert("method", json::string(v.method)); + chash.insert("url", json::string(@v.url)); + chash.insert("method", json::string(@v.method)); alt copy v.key { some(key) { - chash.insert("key", json::string(key)); + chash.insert("key", json::string(@key)); } _ {} } alt copy v.keyfp { some(keyfp) { - chash.insert("keyfp", json::string(keyfp)); + chash.insert("keyfp", json::string(@keyfp)); } _ {} } diff --git a/src/libcore/int-template.rs b/src/libcore/int-template.rs index cfe6169760868..b1d1457654e22 100644 --- a/src/libcore/int-template.rs +++ b/src/libcore/int-template.rs @@ -16,21 +16,21 @@ export ord, eq, num; const min_value: T = -1 as T << (inst::bits - 1 as T); const max_value: T = min_value - 1 as T; -pure fn min(x: T, y: T) -> T { if x < y { x } else { y } } -pure fn max(x: T, y: T) -> T { if x > y { x } else { y } } - -pure fn add(x: T, y: T) -> T { x + y } -pure fn sub(x: T, y: T) -> T { x - y } -pure fn mul(x: T, y: T) -> T { x * y } -pure fn div(x: T, y: T) -> T { x / y } -pure fn rem(x: T, y: T) -> T { x % y } - -pure fn lt(x: T, y: T) -> bool { x < y } -pure fn le(x: T, y: T) -> bool { x <= y } -pure fn eq(x: T, y: T) -> bool { x == y } -pure fn ne(x: T, y: T) -> bool { x != y } -pure fn ge(x: T, y: T) -> bool { x >= y } -pure fn gt(x: T, y: T) -> bool { x > y } +pure fn min(&&x: T, &&y: T) -> T { if x < y { x } else { y } } +pure fn max(&&x: T, &&y: T) -> T { if x > y { x } else { y } } + +pure fn add(&&x: T, &&y: T) -> T { x + y } +pure fn sub(&&x: T, &&y: T) -> T { x - y } +pure fn mul(&&x: T, &&y: T) -> T { x * y } +pure fn div(&&x: T, &&y: T) -> T { x / y } +pure fn rem(&&x: T, &&y: T) -> T { x % y } + +pure fn lt(&&x: T, &&y: T) -> bool { x < y } +pure fn le(&&x: T, &&y: T) -> bool { x <= y } +pure fn eq(&&x: T, &&y: T) -> bool { x == y } +pure fn ne(&&x: T, &&y: T) -> bool { x != y } +pure fn ge(&&x: T, &&y: T) -> bool { x >= y } +pure fn gt(&&x: T, &&y: T) -> bool { x > y } pure fn is_positive(x: T) -> bool { x > 0 as T } pure fn is_negative(x: T) -> bool { x < 0 as T 
} diff --git a/src/libcore/int-template/int.rs b/src/libcore/int-template/int.rs index b7c4d1717bbca..d28333c79e63d 100644 --- a/src/libcore/int-template/int.rs +++ b/src/libcore/int-template/int.rs @@ -7,7 +7,7 @@ const bits: T = 32 as T; const bits: T = 64 as T; #[doc = "Produce a uint suitable for use in a hash table"] -pure fn hash(x: int) -> uint { ret x as uint; } +pure fn hash(&&x: int) -> uint { ret x as uint; } #[doc = "Returns `base` raised to the power of `exponent`"] fn pow(base: int, exponent: uint) -> int { diff --git a/src/libcore/str.rs b/src/libcore/str.rs index d62beaa184d28..cf81dcd7157ff 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -222,7 +222,7 @@ pure fn from_char(ch: char) -> str { } #[doc = "Convert a vector of chars to a string"] -pure fn from_chars(chs: [char]) -> str { +pure fn from_chars(chs: [const char]/&) -> str { let mut buf = ""; unchecked { reserve(buf, chs.len()); @@ -232,7 +232,7 @@ pure fn from_chars(chs: [char]) -> str { } #[doc = "Concatenate a vector of strings"] -pure fn concat(v: [str]) -> str { +pure fn concat(v: [const str]/&) -> str { let mut s: str = ""; for vec::each(v) {|ss| s += ss; } ret s; @@ -241,7 +241,7 @@ pure fn concat(v: [str]) -> str { #[doc = " Concatenate a vector of strings, placing a given separator between each "] -pure fn connect(v: [str], sep: str) -> str { +pure fn connect(v: [const str]/&a, sep: str) -> str { let mut s = "", first = true; for vec::each(v) {|ss| if first { first = false; } else { s += sep; } @@ -338,7 +338,7 @@ pure fn byte_slice(s: str/&, f: fn([u8]/&) -> T) -> T unsafe { } #[doc = "Convert a string to a vector of characters"] -pure fn chars(s: str) -> [char] { +pure fn chars(s: str/&) -> [char] { let mut buf = [], i = 0u; let len = len(s); while i < len { @@ -355,7 +355,7 @@ Take a substring of another. Returns a string containing `n` characters starting at byte offset `begin`. 
"] -pure fn substr(s: str, begin: uint, n: uint) -> str { +pure fn substr(s: str/&, begin: uint, n: uint) -> str { slice(s, begin, begin + count_bytes(s, begin, n)) } @@ -365,7 +365,7 @@ Returns a slice of the given string from the byte range [`begin`..`end`) Fails when `begin` and `end` do not point to valid characters or beyond the last character of the string "] -pure fn slice(s: str, begin: uint, end: uint) -> str unsafe { +pure fn slice(s: str/&, begin: uint, end: uint) -> str unsafe { assert is_char_boundary(s, begin); assert is_char_boundary(s, end); unsafe::slice_bytes(s, begin, end) @@ -374,7 +374,7 @@ pure fn slice(s: str, begin: uint, end: uint) -> str unsafe { #[doc = " Splits a string into substrings at each occurrence of a given character "] -pure fn split_char(s: str, sep: char) -> [str] { +pure fn split_char(s: str/&, sep: char) -> [str] { split_char_inner(s, sep, len(s), true) } @@ -384,18 +384,18 @@ character up to 'count' times The byte must be a valid UTF-8/ASCII byte "] -pure fn splitn_char(s: str, sep: char, count: uint) -> [str] { +pure fn splitn_char(s: str/&, sep: char, count: uint) -> [str] { split_char_inner(s, sep, count, true) } #[doc = " Like `split_char`, but omits empty strings from the returned vector "] -pure fn split_char_nonempty(s: str, sep: char) -> [str] { +pure fn split_char_nonempty(s: str/&, sep: char) -> [str] { split_char_inner(s, sep, len(s), false) } -pure fn split_char_inner(s: str, sep: char, count: uint, allow_empty: bool) +pure fn split_char_inner(s: str/&, sep: char, count: uint, allow_empty: bool) -> [str] unsafe { if sep < 128u as char { let b = sep as u8, l = len(s); @@ -422,7 +422,7 @@ pure fn split_char_inner(s: str, sep: char, count: uint, allow_empty: bool) #[doc = "Splits a string into substrings using a character function"] -pure fn split(s: str, sepfn: fn(char) -> bool) -> [str] { +pure fn split(s: str/&, sepfn: fn(char) -> bool) -> [str] { split_inner(s, sepfn, len(s), true) } @@ -430,16 +430,16 @@ pure fn split(s: str, sepfn: fn(char) -> bool) -> [str] { Splits a string into substrings using a character function, cutting at most `count` times. 
"] -pure fn splitn(s: str, sepfn: fn(char) -> bool, count: uint) -> [str] { +pure fn splitn(s: str/&, sepfn: fn(char) -> bool, count: uint) -> [str] { split_inner(s, sepfn, count, true) } #[doc = "Like `split`, but omits empty strings from the returned vector"] -pure fn split_nonempty(s: str, sepfn: fn(char) -> bool) -> [str] { +pure fn split_nonempty(s: str/&, sepfn: fn(char) -> bool) -> [str] { split_inner(s, sepfn, len(s), false) } -pure fn split_inner(s: str, sepfn: fn(cc: char) -> bool, count: uint, +pure fn split_inner(s: str/&, sepfn: fn(cc: char) -> bool, count: uint, allow_empty: bool) -> [str] unsafe { let l = len(s); let mut result = [], i = 0u, start = 0u, done = 0u; @@ -461,7 +461,7 @@ pure fn split_inner(s: str, sepfn: fn(cc: char) -> bool, count: uint, } // See Issue #1932 for why this is a naive search -pure fn iter_matches(s: str, sep: str, f: fn(uint, uint)) { +pure fn iter_matches(s: str/&a, sep: str/&b, f: fn(uint, uint)) { let sep_len = len(sep), l = len(s); assert sep_len > 0u; let mut i = 0u, match_start = 0u, match_i = 0u; @@ -488,7 +488,7 @@ pure fn iter_matches(s: str, sep: str, f: fn(uint, uint)) { } } -pure fn iter_between_matches(s: str, sep: str, f: fn(uint, uint)) { +pure fn iter_between_matches(s: str/&a, sep: str/&b, f: fn(uint, uint)) { let mut last_end = 0u; iter_matches(s, sep) {|from, to| f(last_end, from); @@ -506,7 +506,7 @@ Splits a string into a vector of the substrings separated by a given string assert [\"\", \"XXX\", \"YYY\", \"\"] == split_str(\".XXX.YYY.\", \".\") ~~~ "] -pure fn split_str(s: str, sep: str) -> [str] { +pure fn split_str(s: str/&a, sep: str/&b) -> [str] { let mut result = []; iter_between_matches(s, sep) {|from, to| unsafe { result += [unsafe::slice_bytes(s, from, to)]; } @@ -514,7 +514,7 @@ pure fn split_str(s: str, sep: str) -> [str] { result } -pure fn split_str_nonempty(s: str, sep: str) -> [str] { +pure fn split_str_nonempty(s: str/&a, sep: str/&b) -> [str] { let mut result = []; iter_between_matches(s, sep) {|from, to| if to > from { @@ -527,13 +527,13 @@ pure fn split_str_nonempty(s: str, sep: str) -> [str] { #[doc = " Splits a string into a vector of the substrings separated by LF ('\\n') "] -pure fn lines(s: str) -> [str] { split_char(s, '\n') } +pure fn lines(s: str/&) -> [str] { split_char(s, '\n') } #[doc = " Splits a string into a vector of the substrings separated by LF ('\\n') and/or CR LF ('\\r\\n') "] -pure fn lines_any(s: str) -> [str] { +pure fn lines_any(s: str/&) -> [str] { vec::map(lines(s), {|s| let l = len(s); let mut cp = s; @@ -547,19 +547,19 @@ pure fn lines_any(s: str) -> [str] { #[doc = " Splits a string into a vector of the substrings separated by whitespace "] -pure fn words(s: str) -> [str] { +pure fn words(s: str/&) -> [str] { split_nonempty(s, {|c| char::is_whitespace(c)}) } #[doc = "Convert a string to lowercase. ASCII only"] -pure fn to_lower(s: str) -> str { +pure fn to_lower(s: str/&) -> str { map(s, {|c| unchecked{(libc::tolower(c as libc::c_char)) as char} }) } #[doc = "Convert a string to uppercase. 
ASCII only"] -pure fn to_upper(s: str) -> str { +pure fn to_upper(s: str/&) -> str { map(s, {|c| unchecked{(libc::toupper(c as libc::c_char)) as char} }) @@ -629,7 +629,7 @@ Section: Iterating through strings Return true if a predicate matches all characters or if the string contains no characters "] -pure fn all(s: str, it: fn(char) -> bool) -> bool { +pure fn all(s: str/&, it: fn(char) -> bool) -> bool { all_between(s, 0u, len(s), it) } @@ -637,12 +637,12 @@ pure fn all(s: str, it: fn(char) -> bool) -> bool { Return true if a predicate matches any character (and false if it matches none or there are no characters) "] -pure fn any(ss: str, pred: fn(char) -> bool) -> bool { +pure fn any(ss: str/&, pred: fn(char) -> bool) -> bool { !all(ss, {|cc| !pred(cc)}) } #[doc = "Apply a function to each character"] -pure fn map(ss: str, ff: fn(char) -> char) -> str { +pure fn map(ss: str/&, ff: fn(char) -> char) -> str { let mut result = ""; unchecked { reserve(result, len(ss)); @@ -654,7 +654,7 @@ pure fn map(ss: str, ff: fn(char) -> char) -> str { } #[doc = "Iterate over the bytes in a string"] -pure fn bytes_iter(ss: str, it: fn(u8)) { +pure fn bytes_iter(ss: str/&, it: fn(u8)) { let mut pos = 0u; let len = len(ss); @@ -666,7 +666,7 @@ pure fn bytes_iter(ss: str, it: fn(u8)) { #[doc = "Iterate over the bytes in a string"] #[inline(always)] -pure fn each(s: str, it: fn(u8) -> bool) { +pure fn each(s: str/&, it: fn(u8) -> bool) { let mut i = 0u, l = len(s); while (i < l) { if !it(s[i]) { break; } @@ -676,7 +676,7 @@ pure fn each(s: str, it: fn(u8) -> bool) { #[doc = "Iterates over the chars in a string"] #[inline(always)] -pure fn each_char(s: str, it: fn(char) -> bool) { +pure fn each_char(s: str/&, it: fn(char) -> bool) { let mut pos = 0u; let len = len(s); while pos < len { @@ -687,7 +687,7 @@ pure fn each_char(s: str, it: fn(char) -> bool) { } #[doc = "Iterate over the characters in a string"] -pure fn chars_iter(s: str, it: fn(char)) { +pure fn chars_iter(s: str/&, it: fn(char)) { let mut pos = 0u; let len = len(s); while (pos < len) { @@ -700,7 +700,7 @@ pure fn chars_iter(s: str, it: fn(char)) { #[doc = " Apply a function to each substring after splitting by character "] -pure fn split_char_iter(ss: str, cc: char, ff: fn(&&str)) { +pure fn split_char_iter(ss: str/&, cc: char, ff: fn(&&str)) { vec::iter(split_char(ss, cc), ff) } @@ -708,18 +708,18 @@ pure fn split_char_iter(ss: str, cc: char, ff: fn(&&str)) { Apply a function to each substring after splitting by character, up to `count` times "] -pure fn splitn_char_iter(ss: str, sep: char, count: uint, +pure fn splitn_char_iter(ss: str/&, sep: char, count: uint, ff: fn(&&str)) unsafe { vec::iter(splitn_char(ss, sep, count), ff) } #[doc = "Apply a function to each word"] -pure fn words_iter(ss: str, ff: fn(&&str)) { +pure fn words_iter(ss: str/&, ff: fn(&&str)) { vec::iter(words(ss), ff) } #[doc = "Apply a function to each line (by '\\n')"] -pure fn lines_iter(ss: str, ff: fn(&&str)) { +pure fn lines_iter(ss: str/&, ff: fn(&&str)) { vec::iter(lines(ss), ff) } @@ -740,7 +740,7 @@ Returns the byte index of the first matching character An `option` containing the byte index of the first matching character or `none` if there is no match "] -pure fn find_char(s: str, c: char) -> option { +pure fn find_char(s: str/&, c: char) -> option { find_char_between(s, c, 0u, len(s)) } @@ -764,7 +764,7 @@ or `none` if there is no match `start` must be less than or equal to `len(s)`. 
`start` must be the index of a character boundary, as defined by `is_char_boundary`. "] -pure fn find_char_from(s: str, c: char, start: uint) -> option { +pure fn find_char_from(s: str/&, c: char, start: uint) -> option { find_char_between(s, c, start, len(s)) } @@ -789,7 +789,7 @@ or `none` if there is no match or equal to `len(s)`. `start` must be the index of a character boundary, as defined by `is_char_boundary`. "] -pure fn find_char_between(s: str, c: char, start: uint, end: uint) +pure fn find_char_between(s: str/&, c: char, start: uint, end: uint) -> option { if c < 128u as char { assert start <= end; @@ -819,7 +819,7 @@ Returns the byte index of the last matching character An `option` containing the byte index of the last matching character or `none` if there is no match "] -pure fn rfind_char(s: str, c: char) -> option { +pure fn rfind_char(s: str/&, c: char) -> option { rfind_char_between(s, c, len(s), 0u) } @@ -843,7 +843,7 @@ or `none` if there is no match `start` must be less than or equal to `len(s)`. `start` must be the index of a character boundary, as defined by `is_char_boundary`. "] -pure fn rfind_char_from(s: str, c: char, start: uint) -> option { +pure fn rfind_char_from(s: str/&, c: char, start: uint) -> option { rfind_char_between(s, c, start, 0u) } @@ -868,7 +868,7 @@ or `none` if there is no match or equal to `len(s)`. `start` must be the index of a character boundary, as defined by `is_char_boundary`. "] -pure fn rfind_char_between(s: str, c: char, start: uint, end: uint) +pure fn rfind_char_between(s: str/&, c: char, start: uint, end: uint) -> option { if c < 128u as char { assert start >= end; @@ -899,7 +899,7 @@ the given predicate An `option` containing the byte index of the first matching character or `none` if there is no match "] -pure fn find(s: str, f: fn(char) -> bool) -> option { +pure fn find(s: str/&, f: fn(char) -> bool) -> option { find_between(s, 0u, len(s), f) } @@ -923,7 +923,8 @@ or `none` if there is no match `start` must be less than or equal to `len(s)`. `start` must be the index of a character boundary, as defined by `is_char_boundary`. "] -pure fn find_from(s: str, start: uint, f: fn(char) -> bool) -> option { +pure fn find_from(s: str/&, start: uint, f: fn(char) + -> bool) -> option { find_between(s, start, len(s), f) } @@ -949,7 +950,7 @@ or `none` if there is no match or equal to `len(s)`. `start` must be the index of a character boundary, as defined by `is_char_boundary`. "] -pure fn find_between(s: str, start: uint, end: uint, f: fn(char) -> bool) +pure fn find_between(s: str/&, start: uint, end: uint, f: fn(char) -> bool) -> option { assert start <= end; assert end <= len(s); @@ -977,7 +978,7 @@ the given predicate An option containing the byte index of the last matching character or `none` if there is no match "] -pure fn rfind(s: str, f: fn(char) -> bool) -> option { +pure fn rfind(s: str/&, f: fn(char) -> bool) -> option { rfind_between(s, len(s), 0u, f) } @@ -1001,7 +1002,8 @@ or `none` if there is no match `start` must be less than or equal to `len(s)', `start` must be the index of a character boundary, as defined by `is_char_boundary` "] -pure fn rfind_from(s: str, start: uint, f: fn(char) -> bool) -> option { +pure fn rfind_from(s: str/&, start: uint, f: fn(char) -> bool) + -> option { rfind_between(s, start, 0u, f) } @@ -1027,7 +1029,7 @@ or `none` if there is no match than or equal to `len(s)`. 
`start` must be the index of a character boundary, as defined by `is_char_boundary` "] -pure fn rfind_between(s: str, start: uint, end: uint, f: fn(char) -> bool) +pure fn rfind_between(s: str/&, start: uint, end: uint, f: fn(char) -> bool) -> option { assert start >= end; assert start <= len(s); @@ -1042,7 +1044,7 @@ pure fn rfind_between(s: str, start: uint, end: uint, f: fn(char) -> bool) } // Utility used by various searching functions -pure fn match_at(haystack: str, needle: str, at: uint) -> bool { +pure fn match_at(haystack: str/&a, needle: str/&b, at: uint) -> bool { let mut i = at; for each(needle) {|c| if haystack[i] != c { ret false; } i += 1u; } ret true; @@ -1061,7 +1063,7 @@ Returns the byte index of the first matching substring An `option` containing the byte index of the first matching substring or `none` if there is no match "] -pure fn find_str(haystack: str, needle: str) -> option { +pure fn find_str(haystack: str/&a, needle: str/&b) -> option { find_str_between(haystack, needle, 0u, len(haystack)) } @@ -1084,7 +1086,7 @@ or `none` if there is no match `start` must be less than or equal to `len(s)` "] -pure fn find_str_from(haystack: str, needle: str, start: uint) +pure fn find_str_from(haystack: str/&a, needle: str/&b, start: uint) -> option { find_str_between(haystack, needle, start, len(haystack)) } @@ -1109,7 +1111,8 @@ or `none` if there is no match `start` must be less than or equal to `end` and `end` must be less than or equal to `len(s)`. "] -pure fn find_str_between(haystack: str, needle: str, start: uint, end:uint) +pure fn find_str_between(haystack: str/&a, needle: str/&b, start: uint, + end:uint) -> option { // See Issue #1932 for why this is a naive search assert end <= len(haystack); @@ -1134,7 +1137,7 @@ Returns true if one string contains another * haystack - The string to look in * needle - The string to look for "] -pure fn contains(haystack: str, needle: str) -> bool { +pure fn contains(haystack: str/&a, needle: str/&b) -> bool { option::is_some(find_str(haystack, needle)) } @@ -1146,7 +1149,7 @@ Returns true if one string starts with another * haystack - The string to look in * needle - The string to look for "] -pure fn starts_with(haystack: str, needle: str) -> bool unsafe { +pure fn starts_with(haystack: str/&a, needle: str/&b) -> bool unsafe { let haystack_len = len(haystack), needle_len = len(needle); if needle_len == 0u { true } else if needle_len > haystack_len { false } @@ -1161,7 +1164,7 @@ Returns true if one string ends with another * haystack - The string to look in * needle - The string to look for "] -pure fn ends_with(haystack: str, needle: str) -> bool { +pure fn ends_with(haystack: str/&a, needle: str/&b) -> bool { let haystack_len = len(haystack), needle_len = len(needle); if needle_len == 0u { true } else if needle_len > haystack_len { false } @@ -1173,24 +1176,24 @@ Section: String properties */ #[doc = "Determines if a string contains only ASCII characters"] -pure fn is_ascii(s: str) -> bool { +pure fn is_ascii(s: str/&) -> bool { let mut i: uint = len(s); while i > 0u { i -= 1u; if !u8::is_ascii(s[i]) { ret false; } } ret true; } #[doc = "Returns true if the string has length 0"] -pure fn is_empty(s: str) -> bool { len(s) == 0u } +pure fn is_empty(s: str/&) -> bool { len(s) == 0u } #[doc = "Returns true if the string has length greater than 0"] -pure fn is_not_empty(s: str) -> bool { !is_empty(s) } +pure fn is_not_empty(s: str/&) -> bool { !is_empty(s) } #[doc = " Returns true if the string contains only whitespace Whitespace 
characters are determined by `char::is_whitespace` "] -pure fn is_whitespace(s: str) -> bool { +pure fn is_whitespace(s: str/&) -> bool { ret all(s, char::is_whitespace); } @@ -1199,27 +1202,26 @@ Returns true if the string contains only alphanumerics Alphanumeric characters are determined by `char::is_alphanumeric` "] -fn is_alphanumeric(s: str) -> bool { +fn is_alphanumeric(s: str/&) -> bool { ret all(s, char::is_alphanumeric); } #[doc = " Returns the string length/size in bytes not counting the null terminator "] -pure fn len(s: str) -> uint unsafe { - let repr: *vec::unsafe::vec_repr = ::unsafe::reinterpret_cast(s); - (*repr).fill - 1u +pure fn len(s: str/&) -> uint { + unpack_slice(s) { |_p, n| n - 1u } } #[doc = "Returns the number of characters that a string holds"] -pure fn char_len(s: str) -> uint { count_chars(s, 0u, len(s)) } +pure fn char_len(s: str/&) -> uint { count_chars(s, 0u, len(s)) } /* Section: Misc */ #[doc = "Determines if a vector of bytes contains valid UTF-8"] -pure fn is_utf8(v: [const u8]) -> bool { +pure fn is_utf8(v: [const u8]/&) -> bool { let mut i = 0u; let total = vec::len::(v); while i < total { @@ -1237,7 +1239,7 @@ pure fn is_utf8(v: [const u8]) -> bool { } #[doc = "Determines if a vector of `u16` contains valid UTF-16"] -pure fn is_utf16(v: [const u16]) -> bool { +pure fn is_utf16(v: [const u16]/&) -> bool { let len = vec::len(v); let mut i = 0u; while (i < len) { @@ -1258,7 +1260,7 @@ pure fn is_utf16(v: [const u16]) -> bool { } #[doc = "Converts to a vector of `u16` encoded as UTF-16"] -pure fn to_utf16(s: str) -> [u16] { +pure fn to_utf16(s: str/&) -> [u16] { let mut u = []; chars_iter(s) {|cch| // Arithmetic with u32 literals is easier on the eyes than chars. @@ -1280,7 +1282,7 @@ pure fn to_utf16(s: str) -> [u16] { ret u; } -pure fn utf16_chars(v: [const u16], f: fn(char)) { +pure fn utf16_chars(v: [const u16]/&, f: fn(char)) { let len = vec::len(v); let mut i = 0u; while (i < len && v[i] != 0u16) { @@ -1305,7 +1307,7 @@ pure fn utf16_chars(v: [const u16], f: fn(char)) { } -pure fn from_utf16(v: [const u16]) -> str { +pure fn from_utf16(v: [const u16]/&) -> str { let mut buf = ""; unchecked { reserve(buf, vec::len(v)); @@ -1328,7 +1330,7 @@ As char_len but for a slice of a string The number of Unicode characters in `s` between the given indices. "] -pure fn count_chars(s: str, start: uint, end: uint) -> uint { +pure fn count_chars(s: str/&, start: uint, end: uint) -> uint { assert is_char_boundary(s, start); assert is_char_boundary(s, end); let mut i = start, len = 0u; @@ -1343,7 +1345,7 @@ pure fn count_chars(s: str, start: uint, end: uint) -> uint { #[doc = " Counts the number of bytes taken by the `n` in `s` starting from `start`. "] -pure fn count_bytes(s: str, start: uint, n: uint) -> uint { +pure fn count_bytes(s: str/&b, start: uint, n: uint) -> uint { assert is_char_boundary(s, start); let mut end = start, cnt = n; let l = len(s); @@ -1375,7 +1377,7 @@ pure fn utf8_char_width(b: u8) -> uint { Returns false if the index points into the middle of a multi-byte character sequence. "] -pure fn is_char_boundary(s: str, index: uint) -> bool { +pure fn is_char_boundary(s: str/&, index: uint) -> bool { if index == len(s) { ret true; } let b = s[index]; ret b < 128u8 || b >= 192u8; @@ -1428,7 +1430,7 @@ index of the next unicode character. If `i` is greater than or equal to the length of the string. If `i` is not the index of the beginning of a valid UTF-8 character. 
"] -pure fn char_range_at(s: str, i: uint) -> {ch: char, next: uint} { +pure fn char_range_at(s: str/&, i: uint) -> {ch: char, next: uint} { let b0 = s[i]; let w = utf8_char_width(b0); assert (w != 0u); @@ -1451,14 +1453,14 @@ pure fn char_range_at(s: str, i: uint) -> {ch: char, next: uint} { } #[doc = "Pluck a character out of a string"] -pure fn char_at(s: str, i: uint) -> char { ret char_range_at(s, i).ch; } +pure fn char_at(s: str/&, i: uint) -> char { ret char_range_at(s, i).ch; } #[doc = " Given a byte position and a str, return the previous char and its position This function can be used to iterate over a unicode string in reverse. "] -pure fn char_range_at_reverse(ss: str, start: uint) +pure fn char_range_at_reverse(ss: str/&, start: uint) -> {ch: char, prev: uint} { let mut prev = start; @@ -1497,7 +1499,7 @@ Loop through a substring, char by char `true` If execution proceeded correctly, `false` if it was interrupted, that is if `it` returned `false` at any point. "] -pure fn all_between(s: str, start: uint, end: uint, +pure fn all_between(s: str/&, start: uint, end: uint, it: fn(char) -> bool) -> bool { assert is_char_boundary(s, start); let mut i = start; @@ -1530,7 +1532,7 @@ Loop through a substring, char by char `true` if `it` returns `true` for any character "] -pure fn any_between(s: str, start: uint, end: uint, +pure fn any_between(s: str/&, start: uint, end: uint, it: fn(char) -> bool) -> bool { !all_between(s, start, end, {|c| !it(c)}) } @@ -1668,7 +1670,7 @@ pure fn capacity(&&s: str) -> uint unsafe { } #[doc = "Escape each char in `s` with char::escape_default."] -pure fn escape_default(s: str) -> str { +pure fn escape_default(s: str/&) -> str { let mut out: str = ""; unchecked { reserve_at_least(out, str::len(s)); @@ -1678,7 +1680,7 @@ pure fn escape_default(s: str) -> str { } #[doc = "Escape each char in `s` with char::escape_unicode."] -pure fn escape_unicode(s: str) -> str { +pure fn escape_unicode(s: str/&) -> str { let mut out: str = ""; unchecked { reserve_at_least(out, str::len(s)); @@ -1762,11 +1764,11 @@ mod unsafe { If begin is greater than end. If end is greater than the length of the string. "] - unsafe fn slice_bytes(s: str, begin: uint, end: uint) -> str unsafe { - assert (begin <= end); - assert (end <= len(s)); + unsafe fn slice_bytes(s: str/&, begin: uint, end: uint) -> str unsafe { + unpack_slice(s) { |sbuf, n| + assert (begin <= end); + assert (end <= n); - let mut v = as_buf(s) { |sbuf| let mut v = []; vec::reserve(v, end - begin + 1u); vec::as_buf(v) { |vbuf| @@ -1774,10 +1776,9 @@ mod unsafe { ptr::memcpy(vbuf, src, end - begin); } vec::unsafe::set_len(v, end - begin); - v - }; - v += [0u8]; - ret ::unsafe::transmute(v); + v += [0u8]; + ::unsafe::transmute(v) + } } #[doc = "Appends a byte to a string. 
(Not UTF-8 safe)."] @@ -1834,6 +1835,19 @@ mod unsafe { #[doc = "Extension methods for strings"] impl extensions for str { + #[doc = "Returns a string with leading and trailing whitespace removed"] + #[inline] + fn trim() -> str { trim(self) } + #[doc = "Returns a string with leading whitespace removed"] + #[inline] + fn trim_left() -> str { trim_left(self) } + #[doc = "Returns a string with trailing whitespace removed"] + #[inline] + fn trim_right() -> str { trim_right(self) } +} + +#[doc = "Extension methods for strings"] +impl extensions/& for str/& { #[doc = " Return true if a predicate matches all characters or if the string contains no characters @@ -1848,7 +1862,7 @@ impl extensions for str { fn any(it: fn(char) -> bool) -> bool { any(self, it) } #[doc = "Returns true if one string contains another"] #[inline] - fn contains(needle: str) -> bool { contains(self, needle) } + fn contains(needle: str/&a) -> bool { contains(self, needle) } #[doc = "Iterate over the bytes in a string"] #[inline] fn each(it: fn(u8) -> bool) { each(self, it) } @@ -1857,7 +1871,7 @@ impl extensions for str { fn each_char(it: fn(char) -> bool) { each_char(self, it) } #[doc = "Returns true if one string ends with another"] #[inline] - fn ends_with(needle: str) -> bool { ends_with(self, needle) } + fn ends_with(needle: str/&) -> bool { ends_with(self, needle) } #[doc = "Returns true if the string has length 0"] #[inline] fn is_empty() -> bool { is_empty(self) } @@ -1902,10 +1916,10 @@ impl extensions for str { string "] #[inline] - fn split_str(sep: str) -> [str] { split_str(self, sep) } + fn split_str(sep: str/&a) -> [str] { split_str(self, sep) } #[doc = "Returns true if one string starts with another"] #[inline] - fn starts_with(needle: str) -> bool { starts_with(self, needle) } + fn starts_with(needle: str/&a) -> bool { starts_with(self, needle) } #[doc = " Take a substring of another. 
@@ -1920,15 +1934,6 @@ impl extensions for str { #[doc = "Convert a string to uppercase"] #[inline] fn to_upper() -> str { to_upper(self) } - #[doc = "Returns a string with leading and trailing whitespace removed"] - #[inline] - fn trim() -> str { trim(self) } - #[doc = "Returns a string with leading whitespace removed"] - #[inline] - fn trim_left() -> str { trim_left(self) } - #[doc = "Returns a string with trailing whitespace removed"] - #[inline] - fn trim_right() -> str { trim_right(self) } #[doc = "Escape each char in `s` with char::escape_default."] #[inline] fn escape_default() -> str { escape_default(self) } @@ -2091,7 +2096,7 @@ mod tests { #[test] fn test_split_str() { - fn t(s: str, sep: str, i: int, k: str) { + fn t(s: str, sep: str/&a, i: int, k: str) { let v = split_str(s, sep); assert eq(v[i], k); } diff --git a/src/libcore/uint-template.rs b/src/libcore/uint-template.rs index cb9b759eeaed1..fda4253a688d4 100644 --- a/src/libcore/uint-template.rs +++ b/src/libcore/uint-template.rs @@ -16,21 +16,21 @@ export ord, eq, num; const min_value: T = 0 as T; const max_value: T = 0 as T - 1 as T; -pure fn min(x: T, y: T) -> T { if x < y { x } else { y } } -pure fn max(x: T, y: T) -> T { if x > y { x } else { y } } - -pure fn add(x: T, y: T) -> T { x + y } -pure fn sub(x: T, y: T) -> T { x - y } -pure fn mul(x: T, y: T) -> T { x * y } -pure fn div(x: T, y: T) -> T { x / y } -pure fn rem(x: T, y: T) -> T { x % y } - -pure fn lt(x: T, y: T) -> bool { x < y } -pure fn le(x: T, y: T) -> bool { x <= y } -pure fn eq(x: T, y: T) -> bool { x == y } -pure fn ne(x: T, y: T) -> bool { x != y } -pure fn ge(x: T, y: T) -> bool { x >= y } -pure fn gt(x: T, y: T) -> bool { x > y } +pure fn min(&&x: T, &&y: T) -> T { if x < y { x } else { y } } +pure fn max(&&x: T, &&y: T) -> T { if x > y { x } else { y } } + +pure fn add(&&x: T, &&y: T) -> T { x + y } +pure fn sub(&&x: T, &&y: T) -> T { x - y } +pure fn mul(&&x: T, &&y: T) -> T { x * y } +pure fn div(&&x: T, &&y: T) -> T { x / y } +pure fn rem(&&x: T, &&y: T) -> T { x % y } + +pure fn lt(&&x: T, &&y: T) -> bool { x < y } +pure fn le(&&x: T, &&y: T) -> bool { x <= y } +pure fn eq(&&x: T, &&y: T) -> bool { x == y } +pure fn ne(&&x: T, &&y: T) -> bool { x != y } +pure fn ge(&&x: T, &&y: T) -> bool { x >= y } +pure fn gt(&&x: T, &&y: T) -> bool { x > y } pure fn is_positive(x: T) -> bool { x > 0 as T } pure fn is_negative(x: T) -> bool { x < 0 as T } diff --git a/src/libcore/uint-template/uint.rs b/src/libcore/uint-template/uint.rs index c8fb9731d472f..e1bb89c27c4e7 100644 --- a/src/libcore/uint-template/uint.rs +++ b/src/libcore/uint-template/uint.rs @@ -54,7 +54,7 @@ is either `x/y` or `x/y + 1`. pure fn div_floor(x: uint, y: uint) -> uint { ret x / y; } #[doc = "Produce a uint suitable for use in a hash table"] -pure fn hash(x: uint) -> uint { ret x; } +pure fn hash(&&x: uint) -> uint { ret x; } #[doc = " Iterate over the range [`lo`..`hi`), or stop when requested diff --git a/src/libcore/vec.rs b/src/libcore/vec.rs index a94b8e6e1b49e..7058f2202b9f0 100644 --- a/src/libcore/vec.rs +++ b/src/libcore/vec.rs @@ -266,7 +266,7 @@ pure fn view(v: [T]/&, start: uint, end: uint) -> [T]/&a { #[doc = " Split the vector `v` by applying each element against the predicate `f`. 
"] -fn split(v: [T], f: fn(T) -> bool) -> [[T]] { +fn split(v: [T]/&, f: fn(T) -> bool) -> [[T]] { let ln = len(v); if (ln == 0u) { ret [] } @@ -289,7 +289,7 @@ fn split(v: [T], f: fn(T) -> bool) -> [[T]] { Split the vector `v` by applying each element against the predicate `f` up to `n` times. "] -fn splitn(v: [T], n: uint, f: fn(T) -> bool) -> [[T]] { +fn splitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]] { let ln = len(v); if (ln == 0u) { ret [] } @@ -315,7 +315,7 @@ fn splitn(v: [T], n: uint, f: fn(T) -> bool) -> [[T]] { Reverse split the vector `v` by applying each element against the predicate `f`. "] -fn rsplit(v: [T], f: fn(T) -> bool) -> [[T]] { +fn rsplit(v: [T]/&, f: fn(T) -> bool) -> [[T]] { let ln = len(v); if (ln == 0u) { ret [] } @@ -338,7 +338,7 @@ fn rsplit(v: [T], f: fn(T) -> bool) -> [[T]] { Reverse split the vector `v` by applying each element against the predicate `f` up to `n times. "] -fn rsplitn(v: [T], n: uint, f: fn(T) -> bool) -> [[T]] { +fn rsplitn(v: [T]/&, n: uint, f: fn(T) -> bool) -> [[T]] { let ln = len(v); if (ln == 0u) { ret [] } @@ -776,7 +776,7 @@ Convert two vectors to a vector of pairs Returns a vector of tuples, where the i-th tuple contains contains the i-th elements from each of the input vectors. "] -pure fn zip(v: [const T], u: [const U]) -> [(T, U)] { +pure fn zip(v: [const T]/&, u: [const U]/&) -> [(T, U)] { let mut zipped = []; let sz = len(v); let mut i = 0u; @@ -859,7 +859,7 @@ Iterates over a vector, with option to break Return true to continue, false to break. "] #[inline(always)] -pure fn each(v: [T]/&, f: fn(T) -> bool) unsafe { +pure fn each(v: [const T]/&, f: fn(T) -> bool) unsafe { vec::unpack_slice(v) {|p, n| let mut n = n; let mut p = p; @@ -877,7 +877,7 @@ Iterates over a vector's elements and indices Return true to continue, false to break. "] #[inline(always)] -pure fn eachi(v: [T]/&, f: fn(uint, T) -> bool) unsafe { +pure fn eachi(v: [const T]/&, f: fn(uint, T) -> bool) unsafe { vec::unpack_slice(v) {|p, n| let mut i = 0u; let mut p = p; @@ -996,7 +996,7 @@ fn as_mut_buf(v: [mut E]/&, f: fn(*mut E) -> T) -> T unsafe { Work with the buffer and length of a slice. "] #[inline(always)] -pure fn unpack_slice(s: [T]/&, +pure fn unpack_slice(s: [const T]/&, f: fn(*T, uint) -> U) -> U unsafe { let v : *(*T,uint) = ::unsafe::reinterpret_cast(ptr::addr_of(s)); let (buf,len) = *v; @@ -1316,7 +1316,7 @@ mod u8 { // // This cannot be used with iter-trait.rs because of the region pointer // required in the slice. 
-impl extensions/& of iter::base_iter for [A]/& { +impl extensions/& of iter::base_iter for [const A]/& { fn each(blk: fn(A) -> bool) { each(self, blk) } fn size_hint() -> option { some(len(self)) } fn eachi(blk: fn(uint, A) -> bool) { iter::eachi(self, blk) } @@ -1328,7 +1328,7 @@ impl extensions/& of iter::base_iter for [A]/& { fn contains(x: A) -> bool { iter::contains(self, x) } fn count(x: A) -> uint { iter::count(self, x) } } -impl extensions/& for [A]/& { +impl extensions/& for [const A]/& { fn filter_to_vec(pred: fn(A) -> bool) -> [A] { iter::filter_to_vec(self, pred) } diff --git a/src/libstd/json.rs b/src/libstd/json.rs index 5d9427c72b860..1ac6a54765978 100644 --- a/src/libstd/json.rs +++ b/src/libstd/json.rs @@ -28,17 +28,17 @@ export null; #[doc = "Represents a json value"] enum json { num(float), - string(str), + string(@str), boolean(bool), - list([json]), - dict(map::hashmap), + list(@[json]), + dict(map::hashmap), null, } type error = { line: uint, col: uint, - msg: str, + msg: @str, }; #[doc = "Serializes a json value into a io::writer"] @@ -46,22 +46,7 @@ fn to_writer(wr: io::writer, j: json) { alt j { num(n) { wr.write_str(float::to_str(n, 6u)); } string(s) { - wr.write_char('"'); - let mut escaped = ""; - str::chars_iter(s) { |c| - alt c { - '"' { escaped += "\\\""; } - '\\' { escaped += "\\\\"; } - '\x08' { escaped += "\\b"; } - '\x0c' { escaped += "\\f"; } - '\n' { escaped += "\\n"; } - '\r' { escaped += "\\r"; } - '\t' { escaped += "\\t"; } - _ { escaped += str::from_char(c); } - } - }; - wr.write_str(escaped); - wr.write_char('"'); + wr.write_str(escape_str(*s)); } boolean(b) { wr.write_str(if b { "true" } else { "false" }); @@ -69,7 +54,7 @@ fn to_writer(wr: io::writer, j: json) { list(v) { wr.write_char('['); let mut first = true; - vec::iter(v) { |item| + for (*v).each { |item| if !first { wr.write_str(", "); } @@ -91,7 +76,7 @@ fn to_writer(wr: io::writer, j: json) { wr.write_str(", "); } first = false; - to_writer(wr, string(key)); + wr.write_str(escape_str(key)); wr.write_str(": "); to_writer(wr, value); }; @@ -103,6 +88,26 @@ fn to_writer(wr: io::writer, j: json) { } } +fn escape_str(s: str) -> str { + let mut escaped = "\""; + str::chars_iter(s) { |c| + alt c { + '"' { escaped += "\\\""; } + '\\' { escaped += "\\\\"; } + '\x08' { escaped += "\\b"; } + '\x0c' { escaped += "\\f"; } + '\n' { escaped += "\\n"; } + '\r' { escaped += "\\r"; } + '\t' { escaped += "\\t"; } + _ { escaped += str::from_char(c); } + } + }; + + escaped += "\""; + + escaped +} + #[doc = "Serializes a json value into a string"] fn to_str(j: json) -> str { io::with_str_writer { |wr| to_writer(wr, j) } @@ -134,8 +139,8 @@ impl parser for parser { self.ch } - fn error(msg: str) -> result { - err({ line: self.line, col: self.col, msg: msg }) + fn error(+msg: str) -> result { + err({ line: self.line, col: self.col, msg: @msg }) } fn parse() -> result { @@ -318,7 +323,7 @@ impl parser for parser { ok(res) } - fn parse_str() -> result { + fn parse_str() -> result<@str, error> { let mut escape = false; let mut res = ""; @@ -365,7 +370,7 @@ impl parser for parser { } else { if self.ch == '"' { self.bump(); - ret ok(res); + ret ok(@res); } str::push_char(res, self.ch); } @@ -382,7 +387,7 @@ impl parser for parser { if self.ch == ']' { self.bump(); - ret ok(list(values)); + ret ok(list(@values)); } loop { @@ -398,7 +403,7 @@ impl parser for parser { alt self.ch { ',' { self.bump(); } - ']' { self.bump(); ret ok(list(values)); } + ']' { self.bump(); ret ok(list(@values)); } _ { ret 
self.error("expecting ',' or ']'"); } } }; @@ -436,7 +441,7 @@ impl parser for parser { self.bump(); alt self.parse_value() { - ok(value) { values.insert(key, value); } + ok(value) { values.insert(copy *key, value); } e { ret e; } } self.parse_whitespace(); @@ -478,7 +483,7 @@ fn eq(value0: json, value1: json) -> bool { (num(f0), num(f1)) { f0 == f1 } (string(s0), string(s1)) { s0 == s1 } (boolean(b0), boolean(b1)) { b0 == b1 } - (list(l0), list(l1)) { vec::all2(l0, l1, eq) } + (list(l0), list(l1)) { vec::all2(*l0, *l1, eq) } (dict(d0), dict(d1)) { if d0.size() == d1.size() { let mut equal = true; @@ -558,13 +563,17 @@ impl of to_json for bool { } impl of to_json for str { + fn to_json() -> json { string(@copy self) } +} + +impl of to_json for @str { fn to_json() -> json { string(self) } } impl of to_json for (A, B) { fn to_json() -> json { let (a, b) = self; - list([a.to_json(), b.to_json()]) + list(@[a.to_json(), b.to_json()]) } } @@ -572,19 +581,19 @@ impl of to_json for (A, B, C) { fn to_json() -> json { let (a, b, c) = self; - list([a.to_json(), b.to_json(), c.to_json()]) + list(@[a.to_json(), b.to_json(), c.to_json()]) } } impl of to_json for [A] { - fn to_json() -> json { list(self.map { |elt| elt.to_json() }) } + fn to_json() -> json { list(@self.map { |elt| elt.to_json() }) } } impl of to_json for hashmap { fn to_json() -> json { let d = map::str_hash(); for self.each() { |key, value| - d.insert(key, value.to_json()); + d.insert(copy key, value.to_json()); } dict(d) } @@ -603,13 +612,19 @@ impl of to_str::to_str for json { fn to_str() -> str { to_str(self) } } +impl of to_str::to_str for error { + fn to_str() -> str { + #fmt("%u:%u: %s", self.line, self.col, *self.msg) + } +} + #[cfg(test)] mod tests { fn mk_dict(items: [(str, json)]) -> json { let d = map::str_hash(); vec::iter(items) { |item| - let (key, value) = item; + let (key, value) = copy item; d.insert(key, value); }; @@ -631,8 +646,8 @@ mod tests { #[test] fn test_write_str() { - assert to_str(string("")) == "\"\""; - assert to_str(string("foo")) == "\"foo\""; + assert to_str(string(@"")) == "\"\""; + assert to_str(string(@"foo")) == "\"foo\""; } #[test] @@ -643,12 +658,12 @@ mod tests { #[test] fn test_write_list() { - assert to_str(list([])) == "[]"; - assert to_str(list([boolean(true)])) == "[true]"; - assert to_str(list([ + assert to_str(list(@[])) == "[]"; + assert to_str(list(@[boolean(true)])) == "[true]"; + assert to_str(list(@[ boolean(false), null, - list([string("foo\nbar"), num(3.5f)]) + list(@[string(@"foo\nbar"), num(3.5f)]) ])) == "[false, null, [\"foo\\nbar\", 3.5]]"; } @@ -658,9 +673,9 @@ mod tests { assert to_str(mk_dict([("a", boolean(true))])) == "{ \"a\": true }"; assert to_str(mk_dict([ ("a", boolean(true)), - ("b", list([ - mk_dict([("c", string("\x0c\r"))]), - mk_dict([("d", string(""))]) + ("b", list(@[ + mk_dict([("c", string(@"\x0c\r"))]), + mk_dict([("d", string(@""))]) ])) ])) == "{ " + @@ -675,35 +690,35 @@ mod tests { #[test] fn test_trailing_characters() { assert from_str("nulla") == - err({line: 1u, col: 5u, msg: "trailing characters"}); + err({line: 1u, col: 5u, msg: @"trailing characters"}); assert from_str("truea") == - err({line: 1u, col: 5u, msg: "trailing characters"}); + err({line: 1u, col: 5u, msg: @"trailing characters"}); assert from_str("falsea") == - err({line: 1u, col: 6u, msg: "trailing characters"}); + err({line: 1u, col: 6u, msg: @"trailing characters"}); assert from_str("1a") == - err({line: 1u, col: 2u, msg: "trailing characters"}); + err({line: 1u, col: 2u, msg: 
@"trailing characters"}); assert from_str("[]a") == - err({line: 1u, col: 3u, msg: "trailing characters"}); + err({line: 1u, col: 3u, msg: @"trailing characters"}); assert from_str("{}a") == - err({line: 1u, col: 3u, msg: "trailing characters"}); + err({line: 1u, col: 3u, msg: @"trailing characters"}); } #[test] fn test_read_identifiers() { assert from_str("n") == - err({line: 1u, col: 2u, msg: "invalid syntax"}); + err({line: 1u, col: 2u, msg: @"invalid syntax"}); assert from_str("nul") == - err({line: 1u, col: 4u, msg: "invalid syntax"}); + err({line: 1u, col: 4u, msg: @"invalid syntax"}); assert from_str("t") == - err({line: 1u, col: 2u, msg: "invalid syntax"}); + err({line: 1u, col: 2u, msg: @"invalid syntax"}); assert from_str("truz") == - err({line: 1u, col: 4u, msg: "invalid syntax"}); + err({line: 1u, col: 4u, msg: @"invalid syntax"}); assert from_str("f") == - err({line: 1u, col: 2u, msg: "invalid syntax"}); + err({line: 1u, col: 2u, msg: @"invalid syntax"}); assert from_str("faz") == - err({line: 1u, col: 3u, msg: "invalid syntax"}); + err({line: 1u, col: 3u, msg: @"invalid syntax"}); assert from_str("null") == ok(null); assert from_str("true") == ok(boolean(true)); @@ -716,20 +731,20 @@ mod tests { #[test] fn test_read_num() { assert from_str("+") == - err({line: 1u, col: 1u, msg: "invalid syntax"}); + err({line: 1u, col: 1u, msg: @"invalid syntax"}); assert from_str(".") == - err({line: 1u, col: 1u, msg: "invalid syntax"}); + err({line: 1u, col: 1u, msg: @"invalid syntax"}); assert from_str("-") == - err({line: 1u, col: 2u, msg: "invalid number"}); + err({line: 1u, col: 2u, msg: @"invalid number"}); assert from_str("00") == - err({line: 1u, col: 2u, msg: "invalid number"}); + err({line: 1u, col: 2u, msg: @"invalid number"}); assert from_str("1.") == - err({line: 1u, col: 3u, msg: "invalid number"}); + err({line: 1u, col: 3u, msg: @"invalid number"}); assert from_str("1e") == - err({line: 1u, col: 3u, msg: "invalid number"}); + err({line: 1u, col: 3u, msg: @"invalid number"}); assert from_str("1e+") == - err({line: 1u, col: 4u, msg: "invalid number"}); + err({line: 1u, col: 4u, msg: @"invalid number"}); assert from_str("3") == ok(num(3f)); assert from_str("3.1") == ok(num(3.1f)); @@ -744,82 +759,86 @@ mod tests { #[test] fn test_read_str() { assert from_str("\"") == - err({line: 1u, col: 2u, msg: "EOF while parsing string"}); + err({line: 1u, col: 2u, msg: @"EOF while parsing string"}); assert from_str("\"lol") == - err({line: 1u, col: 5u, msg: "EOF while parsing string"}); + err({line: 1u, col: 5u, msg: @"EOF while parsing string"}); - assert from_str("\"\"") == ok(string("")); - assert from_str("\"foo\"") == ok(string("foo")); - assert from_str("\"\\\"\"") == ok(string("\"")); - assert from_str("\"\\b\"") == ok(string("\x08")); - assert from_str("\"\\n\"") == ok(string("\n")); - assert from_str("\"\\r\"") == ok(string("\r")); - assert from_str("\"\\t\"") == ok(string("\t")); - assert from_str(" \"foo\" ") == ok(string("foo")); + assert from_str("\"\"") == ok(string(@"")); + assert from_str("\"foo\"") == ok(string(@"foo")); + assert from_str("\"\\\"\"") == ok(string(@"\"")); + assert from_str("\"\\b\"") == ok(string(@"\x08")); + assert from_str("\"\\n\"") == ok(string(@"\n")); + assert from_str("\"\\r\"") == ok(string(@"\r")); + assert from_str("\"\\t\"") == ok(string(@"\t")); + assert from_str(" \"foo\" ") == ok(string(@"foo")); } #[test] fn test_read_list() { assert from_str("[") == - err({line: 1u, col: 2u, msg: "EOF while parsing value"}); + err({line: 1u, col: 2u, msg: 
@"EOF while parsing value"}); assert from_str("[1") == - err({line: 1u, col: 3u, msg: "EOF while parsing list"}); + err({line: 1u, col: 3u, msg: @"EOF while parsing list"}); assert from_str("[1,") == - err({line: 1u, col: 4u, msg: "EOF while parsing value"}); + err({line: 1u, col: 4u, msg: @"EOF while parsing value"}); assert from_str("[1,]") == - err({line: 1u, col: 4u, msg: "invalid syntax"}); + err({line: 1u, col: 4u, msg: @"invalid syntax"}); assert from_str("[6 7]") == - err({line: 1u, col: 4u, msg: "expecting ',' or ']'"}); - - assert from_str("[]") == ok(list([])); - assert from_str("[ ]") == ok(list([])); - assert from_str("[true]") == ok(list([boolean(true)])); - assert from_str("[ false ]") == ok(list([boolean(false)])); - assert from_str("[null]") == ok(list([null])); - assert from_str("[3, 1]") == ok(list([num(3f), num(1f)])); - assert from_str("\n[3, 2]\n") == ok(list([num(3f), num(2f)])); + err({line: 1u, col: 4u, msg: @"expecting ',' or ']'"}); + + assert from_str("[]") == ok(list(@[])); + assert from_str("[ ]") == ok(list(@[])); + assert from_str("[true]") == ok(list(@[boolean(true)])); + assert from_str("[ false ]") == ok(list(@[boolean(false)])); + assert from_str("[null]") == ok(list(@[null])); + assert from_str("[3, 1]") == ok(list(@[num(3f), num(1f)])); + assert from_str("\n[3, 2]\n") == ok(list(@[num(3f), num(2f)])); assert from_str("[2, [4, 1]]") == - ok(list([num(2f), list([num(4f), num(1f)])])); + ok(list(@[num(2f), list(@[num(4f), num(1f)])])); } #[test] fn test_read_dict() { assert from_str("{") == - err({line: 1u, col: 2u, msg: "EOF while parsing object"}); + err({line: 1u, col: 2u, msg: @"EOF while parsing object"}); assert from_str("{ ") == - err({line: 1u, col: 3u, msg: "EOF while parsing object"}); + err({line: 1u, col: 3u, msg: @"EOF while parsing object"}); assert from_str("{1") == - err({line: 1u, col: 2u, msg: "key must be a string"}); + err({line: 1u, col: 2u, msg: @"key must be a string"}); assert from_str("{ \"a\"") == - err({line: 1u, col: 6u, msg: "EOF while parsing object"}); + err({line: 1u, col: 6u, msg: @"EOF while parsing object"}); assert from_str("{\"a\"") == - err({line: 1u, col: 5u, msg: "EOF while parsing object"}); + err({line: 1u, col: 5u, msg: @"EOF while parsing object"}); assert from_str("{\"a\" ") == - err({line: 1u, col: 6u, msg: "EOF while parsing object"}); + err({line: 1u, col: 6u, msg: @"EOF while parsing object"}); assert from_str("{\"a\" 1") == - err({line: 1u, col: 6u, msg: "expecting ':'"}); + err({line: 1u, col: 6u, msg: @"expecting ':'"}); assert from_str("{\"a\":") == - err({line: 1u, col: 6u, msg: "EOF while parsing value"}); + err({line: 1u, col: 6u, msg: @"EOF while parsing value"}); assert from_str("{\"a\":1") == - err({line: 1u, col: 7u, msg: "EOF while parsing object"}); + err({line: 1u, col: 7u, msg: @"EOF while parsing object"}); assert from_str("{\"a\":1 1") == - err({line: 1u, col: 8u, msg: "expecting ',' or '}'"}); + err({line: 1u, col: 8u, msg: @"expecting ',' or '}'"}); assert from_str("{\"a\":1,") == - err({line: 1u, col: 8u, msg: "EOF while parsing object"}); + err({line: 1u, col: 8u, msg: @"EOF while parsing object"}); assert eq(result::get(from_str("{}")), mk_dict([])); assert eq(result::get(from_str("{\"a\": 3}")), mk_dict([("a", num(3.0f))])); assert eq(result::get(from_str("{ \"a\": null, \"b\" : true }")), - mk_dict([("a", null), ("b", boolean(true))])); + mk_dict([ + ("a", null), + ("b", boolean(true))])); assert eq(result::get(from_str("\n{ \"a\": null, \"b\" : true }\n")), - mk_dict([("a", null), 
("b", boolean(true))])); + mk_dict([ + ("a", null), + ("b", boolean(true))])); assert eq(result::get(from_str("{\"a\" : 1.0 ,\"b\": [ true ]}")), mk_dict([ ("a", num(1.0)), - ("b", list([boolean(true)])) + ("b", list(@[boolean(true)])) ])); assert eq(result::get(from_str( "{" + @@ -832,9 +851,9 @@ mod tests { "}")), mk_dict([ ("a", num(1.0f)), - ("b", list([ + ("b", list(@[ boolean(true), - string("foo\nbar"), + string(@"foo\nbar"), mk_dict([ ("c", mk_dict([("d", null)])) ]) @@ -845,6 +864,6 @@ mod tests { #[test] fn test_multiline_errors() { assert from_str("{\n \"foo\":\n \"bar\"") == - err({line: 3u, col: 8u, msg: "EOF while parsing object"}); + err({line: 3u, col: 8u, msg: @"EOF while parsing object"}); } } diff --git a/src/libstd/map.rs b/src/libstd/map.rs index fc6d3ee52fc7f..140349089a81d 100644 --- a/src/libstd/map.rs +++ b/src/libstd/map.rs @@ -262,7 +262,7 @@ mod chained { ret vec::to_mut(vec::from_elem(nchains, absent)); } - fn mk(hasher: hashfn, eqer: eqfn) -> t { + fn mk(hasher: hashfn, eqer: eqfn) -> t { let initial_capacity: uint = 32u; // 2^5 let slf: t = @{mut count: 0u, mut chains: chains(initial_capacity), @@ -282,7 +282,7 @@ Parameters: hasher - The hash function for key type K eqer - The equality function for key type K */ -fn hashmap(hasher: hashfn, eqer: eqfn) +fn hashmap(hasher: hashfn, eqer: eqfn) -> hashmap { chained::mk(hasher, eqer) } @@ -297,20 +297,14 @@ fn bytes_hash() -> hashmap<[u8], V> { ret hashmap(vec::u8::hash, vec::u8::eq); } -fn hash_int(&&x: int) -> uint { int::hash(x) } -fn eq_int(&&a: int, &&b: int) -> bool { ret a == b; } - #[doc = "Construct a hashmap for int keys"] fn int_hash() -> hashmap { - ret hashmap(hash_int, eq_int); + ret hashmap(int::hash, int::eq); } -fn hash_uint(&&x: uint) -> uint { uint::hash(x) } -fn eq_uint(&&a: uint, &&b: uint) -> bool { ret a == b; } - #[doc = "Construct a hashmap for uint keys"] fn uint_hash() -> hashmap { - ret hashmap(hash_uint, eq_uint); + ret hashmap(uint::hash, uint::eq); } #[doc = " @@ -334,7 +328,7 @@ fn vec_from_set(s: set) -> [T] { #[doc = "Construct a hashmap from a vector"] fn hash_from_vec(hasher: hashfn, eqer: eqfn, - items: [(K, V)]) -> hashmap { + items: [(K, V)]) -> hashmap { let map = hashmap(hasher, eqer); vec::iter(items) { |item| let (key, value) = item; @@ -355,12 +349,12 @@ fn hash_from_bytes(items: [([u8], V)]) -> hashmap<[u8], V> { #[doc = "Construct a hashmap from a vector with int keys"] fn hash_from_ints(items: [(int, V)]) -> hashmap { - hash_from_vec(hash_int, eq_int, items) + hash_from_vec(int::hash, int::eq, items) } #[doc = "Construct a hashmap from a vector with uint keys"] fn hash_from_uints(items: [(uint, V)]) -> hashmap { - hash_from_vec(hash_uint, eq_uint, items) + hash_from_vec(uint::hash, uint::eq, items) } #[cfg(test)] diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 536ce3c294ef6..ad1501dd8b0d1 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -730,7 +730,8 @@ enum inlined_item { ii_item(@item), ii_method(def_id /* impl id */, @method), ii_native(@native_item), - ii_ctor(class_ctor, ident, [ty_param], def_id /* parent id */) + ii_ctor(class_ctor, ident, [ty_param], def_id /* parent id */), + ii_dtor(class_dtor, ident, [ty_param], def_id /* parent id */) } // diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index b2e6446d7ee01..10397e795ab7b 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -106,7 +106,7 @@ fn map_decoded_item(diag: span_handler, // don't decode and instantiate the impl, but just the method, 
we have to // add it to the table now: alt ii { - ii_item(_) | ii_ctor(_,_,_,_) { /* fallthrough */ } + ii_item(*) | ii_ctor(*) | ii_dtor(*) { /* fallthrough */ } ii_native(i) { cx.map.insert(i.id, node_native_item(i, native_abi_rust_intrinsic, @path)); diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index c81f6d9590fcb..9eee9a33af8f9 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -320,6 +320,7 @@ impl inlined_item_methods for inlined_item { ii_native(i) { /* FIXME: bad */ copy i.ident } ii_method(_, m) { /* FIXME: bad */ copy m.ident } ii_ctor(_, nm, _, _) { /* FIXME: bad */ copy nm } + ii_dtor(_, nm, _, _) { /* FIXME: bad */ copy nm } } } @@ -329,6 +330,7 @@ impl inlined_item_methods for inlined_item { ii_native(i) { i.id } ii_method(_, m) { m.id } ii_ctor(ctor, _, _, _) { ctor.node.id } + ii_dtor(dtor, _, _, _) { dtor.node.id } } } @@ -340,6 +342,9 @@ impl inlined_item_methods for inlined_item { ii_ctor(ctor, nm, tps, parent_id) { visit::visit_class_ctor_helper(ctor, nm, tps, parent_id, e, v); } + ii_dtor(dtor, nm, tps, parent_id) { + visit::visit_class_dtor_helper(dtor, tps, parent_id, e, v); + } } } } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 6615938b9ad9b..cddf8de479bdb 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -1,6 +1,6 @@ import either::{either, left, right}; import ast_util::spanned; -import common::{parser_common, seq_sep}; +import common::{parser_common, seq_sep_trailing_disallowed}; export attr_or_ext; export parser_attr; @@ -111,7 +111,7 @@ impl parser_attr for parser { fn parse_meta_seq() -> [@ast::meta_item] { ret self.parse_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), + seq_sep_trailing_disallowed(token::COMMA), {|p| p.parse_meta_item()}).node; } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 0c2718c3b4b58..0520993de745b 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -4,25 +4,23 @@ import parser::parser; type seq_sep = { sep: option, - trailing_opt: bool // is trailing separator optional? 
+ trailing_sep_allowed: bool }; -fn seq_sep(t: token::token) -> seq_sep { - ret {sep: option::some(t), trailing_opt: false}; +fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep { + ret {sep: option::some(t), trailing_sep_allowed: false}; } -fn seq_sep_opt(t: token::token) -> seq_sep { - ret {sep: option::some(t), trailing_opt: true}; +fn seq_sep_trailing_allowed(t: token::token) -> seq_sep { + ret {sep: option::some(t), trailing_sep_allowed: true}; } fn seq_sep_none() -> seq_sep { - ret {sep: option::none, trailing_opt: false}; + ret {sep: option::none, trailing_sep_allowed: false}; } - fn token_to_str(reader: reader, ++token: token::token) -> str { token::to_str(*reader.interner, token) } - // This should be done with traits, once traits work impl parser_common for parser { @@ -203,7 +201,7 @@ impl parser_common for parser { else { self.expect(t); } } _ { } } - if sep.trailing_opt && self.token == ket { break; } + if sep.trailing_sep_allowed && self.token == ket { break; } v += [f(self)]; } ret v; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7e6fb726f6737..828cd5ce7d435 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -9,7 +9,8 @@ import ast::*; import lexer::reader; import prec::{as_prec, token_to_binop}; import attr::parser_attr; -import common::{seq_sep, seq_sep_opt, seq_sep_none, token_to_str}; +import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed, + seq_sep_none, token_to_str}; import common::*;//{parser_common}; import dvec::{dvec, extensions}; @@ -167,9 +168,9 @@ class parser { } fn parse_ty_fn_decl(purity: ast::purity) -> fn_decl { - let inputs = - self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA)) { |p| + let inputs = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA)) { |p| let mode = p.parse_arg_mode(); let name = if is_plain_ident(p.token) && p.look_ahead(1u) == token::COLON { @@ -260,10 +261,10 @@ class parser { fn parse_ty_constr(fn_args: [arg]) -> @constr { let lo = self.span.lo; let path = self.parse_path_without_tps(); - let args = - self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), - {|p| p.parse_constr_arg(fn_args)}); + let args = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_constr_arg(fn_args)}); ret @spanned(lo, self.span.hi, {path: path, args: args, id: self.get_id()}); } @@ -271,10 +272,10 @@ class parser { fn parse_constr_in_type() -> @ty_constr { let lo = self.span.lo; let path = self.parse_path_without_tps(); - let args: [@ty_constr_arg] = - self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), - {|p| p.parse_type_constr_arg()}); + let args: [@ty_constr_arg] = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_type_constr_arg()}); let hi = self.span.lo; let tc: ty_constr_ = {path: path, args: args, id: self.get_id()}; ret @spanned(lo, hi, tc); @@ -389,9 +390,10 @@ class parser { self.bump(); ty_ptr(self.parse_mt()) } else if self.token == token::LBRACE { - let elems = self.parse_unspanned_seq(token::LBRACE, token::RBRACE, - seq_sep_opt(token::COMMA), - {|p| p.parse_ty_field()}); + let elems = self.parse_unspanned_seq( + token::LBRACE, token::RBRACE, + seq_sep_trailing_allowed(token::COMMA), + {|p| p.parse_ty_field()}); if vec::len(elems) == 0u { self.unexpected_last(token::RBRACE); } @@ -802,9 +804,9 
@@ class parser { } else if self.token == token::LBRACKET { self.bump(); let mutbl = self.parse_mutability(); - let es = - self.parse_seq_to_end(token::RBRACKET, seq_sep(token::COMMA), - {|p| p.parse_expr()}); + let es = self.parse_seq_to_end( + token::RBRACKET, seq_sep_trailing_allowed(token::COMMA), + {|p| p.parse_expr()}); hi = self.span.hi; ex = expr_vec(es, mutbl); } else if self.token == token::POUND @@ -833,9 +835,10 @@ class parser { ex = ex_ext.node; } else if self.eat_keyword("bind") { let e = self.parse_expr_res(RESTRICT_NO_CALL_EXPRS); - let es = self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), - {|p| p.parse_expr_or_hole()}); + let es = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_expr_or_hole()}); hi = self.span.hi; ex = expr_bind(e, es); } else if self.eat_keyword("fail") { @@ -937,7 +940,7 @@ class parser { } let pth = self.parse_path_without_tps(); //temporary for a backwards-compatible cycle: - let sep = seq_sep(token::COMMA); + let sep = seq_sep_trailing_disallowed(token::COMMA); let mut e = none; if (self.token == token::LPAREN || self.token == token::LBRACKET) { let lo = self.span.lo; @@ -1009,10 +1012,10 @@ class parser { alt copy self.token { // expr(...) token::LPAREN if self.permits_call() { - let es_opt = - self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), - {|p| p.parse_expr_or_hole()}); + let es_opt = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_expr_or_hole()}); hi = self.span.hi; let nd = @@ -1479,11 +1482,10 @@ class parser { self.expect(token::RPAREN); } _ { - args = - self.parse_unspanned_seq(token::LPAREN, - token::RPAREN, - seq_sep(token::COMMA), - {|p| p.parse_pat()}); + args = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_pat()}); hi = self.span.hi; } } @@ -1783,8 +1785,9 @@ class parser { -> (fn_decl, capture_clause) { let args_or_capture_items: [arg_or_capture_item] = - self.parse_unspanned_seq(token::LPAREN, token::RPAREN, - seq_sep(token::COMMA), parse_arg_fn); + self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + seq_sep_trailing_disallowed(token::COMMA), parse_arg_fn); let inputs = either::lefts(args_or_capture_items); let capture_clause = @either::rights(args_or_capture_items); @@ -1810,10 +1813,10 @@ class parser { if self.eat(token::OROR) { [] } else { - self.parse_unspanned_seq(token::BINOP(token::OR), - token::BINOP(token::OR), - seq_sep(token::COMMA), - {|p| p.parse_fn_block_arg()}) + self.parse_unspanned_seq( + token::BINOP(token::OR), token::BINOP(token::OR), + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_fn_block_arg()}) } }; let output = if self.eat(token::RARROW) { @@ -1984,8 +1987,9 @@ class parser { } fn parse_iface_ref_list() -> [@iface_ref] { - self.parse_seq_to_before_end(token::LBRACE, seq_sep(token::COMMA), - {|p| p.parse_iface_ref()}) + self.parse_seq_to_before_end( + token::LBRACE, seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_iface_ref()}) } fn parse_item_class() -> item_info { @@ -2265,11 +2269,10 @@ class parser { let mut args = [], disr_expr = none; if self.token == token::LPAREN { all_nullary = false; - let arg_tys = - self.parse_unspanned_seq(token::LPAREN, - token::RPAREN, - seq_sep(token::COMMA), - {|p| p.parse_ty(false)}); + let arg_tys = self.parse_unspanned_seq( + token::LPAREN, token::RPAREN, + 
seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_ty(false)}); for arg_tys.each {|ty| args += [{ty: ty, id: self.get_id()}]; } @@ -2409,11 +2412,10 @@ class parser { // foo::bar::{a,b,c} token::LBRACE { - let idents = - self.parse_unspanned_seq(token::LBRACE, token::RBRACE, - seq_sep(token::COMMA), - {|p| - p.parse_path_list_ident()}); + let idents = self.parse_unspanned_seq( + token::LBRACE, token::RBRACE, + seq_sep_trailing_disallowed(token::COMMA), + {|p| p.parse_path_list_ident()}); let path = @{span: mk_sp(lo, self.span.hi), global: false, idents: path, rp: none, types: []}; diff --git a/src/rt/rust_box_annihilator.cpp b/src/rt/rust_box_annihilator.cpp index 7984fbd618e39..20058a8a9db8d 100644 --- a/src/rt/rust_box_annihilator.cpp +++ b/src/rt/rust_box_annihilator.cpp @@ -45,6 +45,10 @@ class annihilator : public shape::data { task->kernel->free(vec); } + void walk_unboxed_vec2(bool is_pod) { + walk_vec2(is_pod, get_unboxed_vec_data_range(dp)); + } + void walk_fixedvec2(uint16_t n_elts, size_t elt_sz, bool is_pod) { walk_vec2(is_pod, get_fixedvec_data_range(n_elts, elt_sz, dp)); } diff --git a/src/rt/rust_cc.cpp b/src/rt/rust_cc.cpp index 999e7d87c5a5f..e75ec46522fff 100644 --- a/src/rt/rust_cc.cpp +++ b/src/rt/rust_cc.cpp @@ -91,6 +91,10 @@ class irc : public shape::data { walk_vec2(is_pod, get_vec_data_range(dp)); } + void walk_unboxed_vec2(bool is_pod) { + walk_vec2(is_pod, get_unboxed_vec_data_range(dp)); + } + void walk_slice2(bool is_pod, bool is_str) { walk_vec2(is_pod, get_slice_data_range(is_str, dp)); } @@ -341,6 +345,10 @@ class mark : public shape::data { walk_vec2(is_pod, get_vec_data_range(dp)); } + void walk_unboxed_vec2(bool is_pod) { + walk_vec2(is_pod, get_unboxed_vec_data_range(dp)); + } + void walk_slice2(bool is_pod, bool is_str) { walk_vec2(is_pod, get_slice_data_range(is_str, dp)); } diff --git a/src/rt/rust_shape.cpp b/src/rt/rust_shape.cpp index 267cd6bfd9820..444bcc9e0887f 100644 --- a/src/rt/rust_shape.cpp +++ b/src/rt/rust_shape.cpp @@ -263,6 +263,11 @@ class cmp : public data { walk_vec2(is_pod, get_vec_data_range(dp)); } + void walk_unboxed_vec2(bool is_pod) { + walk_vec2(is_pod, get_unboxed_vec_data_range(dp)); + } + + void walk_slice2(bool is_pod, bool is_str) { // Slices compare just like vecs. 
walk_vec2(is_pod, get_slice_data_range(is_str, dp)); diff --git a/src/rt/rust_shape.h b/src/rt/rust_shape.h index a957bd1621d65..8766759cc41dd 100644 --- a/src/rt/rust_shape.h +++ b/src/rt/rust_shape.h @@ -58,6 +58,7 @@ const uint8_t SHAPE_SEND_TYDESC = 29u; const uint8_t SHAPE_RPTR = 31u; const uint8_t SHAPE_FIXEDVEC = 32u; const uint8_t SHAPE_SLICE = 33u; +const uint8_t SHAPE_UNBOXED_VEC = 34u; #ifdef _LP64 const uint8_t SHAPE_PTR = SHAPE_U64; @@ -263,9 +264,9 @@ class ctxt { private: void walk_vec0(); + void walk_unboxed_vec0(); void walk_tag0(); void walk_box0(); - void walk_box_old0(); void walk_uniq0(); void walk_struct0(); void walk_res0(); @@ -318,6 +319,7 @@ ctxt::walk() { case SHAPE_RPTR: walk_rptr0(); break; case SHAPE_FIXEDVEC: walk_fixedvec0(); break; case SHAPE_SLICE: walk_slice0(); break; + case SHAPE_UNBOXED_VEC: walk_unboxed_vec0(); break; default: abort(); } } @@ -375,6 +377,19 @@ ctxt::walk_vec0() { sp = end_sp; } +template +void +ctxt::walk_unboxed_vec0() { + bool is_pod = *sp++; + + uint16_t sp_size = get_u16_bump(sp); + const uint8_t *end_sp = sp + sp_size; + + static_cast(this)->walk_unboxed_vec1(is_pod); + + sp = end_sp; +} + template void ctxt::walk_tag0() { @@ -516,6 +531,9 @@ class print : public ctxt { void walk_vec1(bool is_pod) { DPRINT("vec<"); walk(); DPRINT(">"); } + void walk_unboxed_vec1(bool is_pod) { + DPRINT("unboxed_vec<"); walk(); DPRINT(">"); + } void walk_uniq1() { DPRINT("~<"); walk(); DPRINT(">"); } @@ -603,6 +621,11 @@ class size_of : public ctxt { sa.set(sizeof(void *), sizeof(void *)); } + void walk_unboxed_vec1(bool is_pod) { + assert(false && + "trying to compute size of dynamically sized unboxed vector"); + } + void walk_res1(const rust_fn *dtor, const uint8_t *end_sp) { abort(); // TODO } @@ -849,6 +872,12 @@ class data : public ctxt< data > { static std::pair get_vec_data_range(ptr dp); static std::pair get_vec_data_range(ptr_pair &dp); + static std::pair get_unboxed_vec_data_range(ptr dp); + static std::pair + get_unboxed_vec_data_range(ptr_pair &dp); + static ptr get_unboxed_vec_end(ptr dp); + static ptr_pair get_unboxed_vec_end(ptr_pair &dp); + static std::pair get_slice_data_range(bool is_str, ptr dp); static std::pair get_slice_data_range(bool is_str, @@ -880,6 +909,13 @@ class data : public ctxt< data > { DATA_SIMPLE(void *, walk_vec2(is_pod)); } + void walk_unboxed_vec1(bool is_pod) { + // align? 
+ U next_dp = get_unboxed_vec_end(dp); + static_cast(this)->walk_unboxed_vec2(is_pod); + dp = next_dp; + } + void walk_slice1(bool is_pod, bool is_str) { DATA_SIMPLE(void *, walk_slice2(is_pod, is_str)); } @@ -955,7 +991,7 @@ data::walk_uniq_contents1() { if (body_td) { U body_dp(dp.box_body()); arena arena; - T sub(*static_cast(this), body_td->shape, + T sub(*static_cast(this), /*body_td->shape,*/ this->sp, body_td->shape_tables, body_dp); sub.align = true; static_cast(this)->walk_uniq_contents2(sub); @@ -1000,6 +1036,38 @@ data::get_vec_data_range(ptr_pair &dp) { return std::make_pair(start, end); } +template +std::pair +data::get_unboxed_vec_data_range(ptr dp) { + rust_vec* ptr = (rust_vec*)dp; + uint8_t* data = &ptr->data[0]; + return std::make_pair(data, data + ptr->fill); +} + +template +std::pair +data::get_unboxed_vec_data_range(ptr_pair &dp) { + std::pair fst = + get_unboxed_vec_data_range(shape::ptr(dp.fst)); + std::pair snd = + get_unboxed_vec_data_range(shape::ptr(dp.snd)); + ptr_pair start(fst.first, snd.first); + ptr_pair end(fst.second, snd.second); + return std::make_pair(start, end); +} + +template +ptr data::get_unboxed_vec_end(ptr dp) { + rust_vec* ptr = (rust_vec*)dp; + return dp + sizeof(rust_vec) + ptr->fill; +} + +template +ptr_pair data::get_unboxed_vec_end(ptr_pair &dp) { + return ptr_pair(get_unboxed_vec_end(ptr(dp.fst)), + get_unboxed_vec_end(ptr(dp.snd))); +} + template std::pair data::get_slice_data_range(bool is_str, ptr dp) { @@ -1135,6 +1203,10 @@ class log : public data { walk_vec2(is_pod, get_vec_data_range(dp)); } + void walk_unboxed_vec2(bool is_pod) { + walk_vec2(is_pod, get_unboxed_vec_data_range(dp)); + } + void walk_slice2(bool is_pod, bool is_str) { walk_vec2(is_pod, get_slice_data_range(is_str, dp)); out << "/&"; diff --git a/src/rustc/metadata/decoder.rs b/src/rustc/metadata/decoder.rs index 4df45033a77c3..dcb71c6718ae2 100644 --- a/src/rustc/metadata/decoder.rs +++ b/src/rustc/metadata/decoder.rs @@ -335,15 +335,18 @@ fn get_class_method(cdata: cmd, id: ast::node_id, name: str) -> ast::def_id { fn class_dtor(cdata: cmd, id: ast::node_id) -> option { let items = ebml::get_doc(ebml::doc(cdata.data), tag_items); + let mut found = none; let cls_items = alt maybe_find_item(id, items) { some(it) { it } - none { ret none; }}; - let mut rslt = none; - ebml::tagged_docs(cls_items, tag_item_dtor) {|f| - let did = parse_def_id(ebml::doc_data(f)); - rslt = some(translate_def_id(cdata, did)); - } - rslt + none { fail (#fmt("class_dtor: class id not found \ + when looking up dtor for %d", id)); } + }; + ebml::tagged_docs(cls_items, tag_item_dtor) {|doc| + let doc1 = ebml::get_doc(doc, tag_def_id); + let did = parse_def_id(ebml::doc_data(doc1)); + found = some(translate_def_id(cdata, did)); + }; + found } fn get_symbol(data: @[u8], id: ast::node_id) -> str { diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 113fafb73b317..215f73a6d4f46 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -9,7 +9,7 @@ import ebml::writer; import syntax::ast::*; import syntax::print::pprust; import syntax::{ast_util, visit}; -import syntax::ast_util::local_def; +import syntax::ast_util::*; import common::*; import middle::ty; import middle::ty::node_id_to_type; @@ -206,12 +206,6 @@ fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, add_to_index(ebml_w, path, index, it.ident); encode_named_def_id(ebml_w, it.ident, local_def(ctor.node.id)); - /* Encode id for dtor */ - option::iter(m_dtor) {|dtor| - 
ebml_w.wr_tag(tag_item_dtor) {|| - encode_def_id(ebml_w, local_def(dtor.node.id)); - } - }; encode_class_item_paths(ebml_w, items, path + [it.ident], index); } @@ -485,8 +479,8 @@ fn encode_info_for_fn(ecx: @encode_ctxt, ebml_w: ebml::writer, encode_family(ebml_w, purity_fn_family(decl.purity)); encode_type_param_bounds(ebml_w, ecx, tps); let its_ty = node_id_to_type(ecx.tcx, id); - #debug("fn name = %s ty = %s", ident, - util::ppaux::ty_to_str(ecx.tcx, its_ty)); + #debug("fn name = %s ty = %s its node id = %d", ident, + util::ppaux::ty_to_str(ecx.tcx, its_ty), id); encode_type(ecx, ebml_w, its_ty); encode_path(ebml_w, path, ast_map::path_name(ident)); alt item { @@ -623,13 +617,23 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_enum_variant_info(ecx, ebml_w, item.id, variants, path, index, tps); } - item_class(tps, ifaces, items, ctor, _dtor, rp) { + item_class(tps, ifaces, items, ctor, m_dtor, rp) { /* First, encode the fields and methods These come first because we need to write them to make the index, and the index needs to be in the item for the class itself */ let idx = encode_info_for_class(ecx, ebml_w, item.id, path, tps, items, index); + /* Encode the dtor */ + option::iter(m_dtor) {|dtor| + *index += [{val: dtor.node.id, pos: ebml_w.writer.tell()}]; + encode_info_for_fn(ecx, ebml_w, dtor.node.id, item.ident + + "_dtor", path, if tps.len() > 0u { + some(ii_dtor(dtor, item.ident, tps, + local_def(item.id))) } + else { none }, tps, ast_util::dtor_dec()); + } + /* Index the class*/ add_to_index(); /* Now, make an item for the class itself */ @@ -644,6 +648,14 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, for ifaces.each {|t| encode_iface_ref(ebml_w, ecx, t); } + /* Encode the dtor */ + /* Encode id for dtor */ + option::iter(m_dtor) {|dtor| + ebml_w.wr_tag(tag_item_dtor) {|| + encode_def_id(ebml_w, local_def(dtor.node.id)); + } + }; + /* Encode def_ids for each field and method for methods, write all the stuff get_iface_method needs to know*/ @@ -803,9 +815,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer, encode_info_for_item(ecx, ebml_w, i, index, *pt); /* encode ctor, then encode items */ alt i.node { - item_class(tps, _, _, ctor, _, _) { - /* this is assuming that ctors aren't inlined... 
- probably shouldn't assume that */ + item_class(tps, _, _, ctor, m_dtor, _) { #debug("encoding info for ctor %s %d", i.ident, ctor.node.id); *index += [{val: ctor.node.id, pos: ebml_w.writer.tell()}]; @@ -813,7 +823,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer, *pt, if tps.len() > 0u { some(ii_ctor(ctor, i.ident, tps, local_def(i.id))) } - else { none }, tps, ctor.node.dec) + else { none }, tps, ctor.node.dec); } _ {} } diff --git a/src/rustc/metadata/tydecode.rs b/src/rustc/metadata/tydecode.rs index 61f6210b49d72..02ba2597d7673 100644 --- a/src/rustc/metadata/tydecode.rs +++ b/src/rustc/metadata/tydecode.rs @@ -308,6 +308,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t { ret ty::mk_rptr(st.tcx, r, mt); } 'I' { ret ty::mk_vec(st.tcx, parse_mt(st, conv)); } + 'U' { ret ty::mk_unboxed_vec(st.tcx, parse_mt(st, conv)); } 'V' { let mt = parse_mt(st, conv); let v = parse_vstore(st); diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs index 4893275f9531d..051ea9e444ecd 100644 --- a/src/rustc/metadata/tyencode.rs +++ b/src/rustc/metadata/tyencode.rs @@ -252,6 +252,7 @@ fn enc_sty(w: io::writer, cx: @ctxt, st: ty::sty) { enc_vstore(w, cx, v); } ty::ty_vec(mt) { w.write_char('I'); enc_mt(w, cx, mt); } + ty::ty_unboxed_vec(mt) { w.write_char('U'); enc_mt(w, cx, mt); } ty::ty_rec(fields) { w.write_str("R["/&); for fields.each {|field| diff --git a/src/rustc/middle/astencode.rs b/src/rustc/middle/astencode.rs index d604505455cb5..0d6b158a9ae5f 100644 --- a/src/rustc/middle/astencode.rs +++ b/src/rustc/middle/astencode.rs @@ -427,6 +427,12 @@ fn simplify_ast(ii: ast::inlined_item) -> ast::inlined_item { with ctor.node} with ctor}, nm, tps, parent_id) } + ast::ii_dtor(dtor, nm, tps, parent_id) { + let dtor_body = fld.fold_block(dtor.node.body); + ast::ii_dtor({node: {body: dtor_body + with dtor.node} + with dtor}, nm, tps, parent_id) + } } } @@ -464,6 +470,16 @@ fn renumber_ast(xcx: extended_decode_ctxt, ii: ast::inlined_item) with ctor.node} with ctor}, nm, new_params, new_parent) } + ast::ii_dtor(dtor, nm, tps, parent_id) { + let dtor_body = fld.fold_block(dtor.node.body); + let new_params = fold::fold_ty_params(tps, fld); + let dtor_id = fld.new_id(dtor.node.id); + let new_parent = xcx.tr_def_id(parent_id); + let new_self = fld.new_id(dtor.node.self_id); + ast::ii_dtor({node: {id: dtor_id, self_id: new_self, body: dtor_body} + with dtor}, + nm, new_params, new_parent) + } } } diff --git a/src/rustc/middle/borrowck.rs b/src/rustc/middle/borrowck.rs index 067140500102e..66118dbc38c3a 100644 --- a/src/rustc/middle/borrowck.rs +++ b/src/rustc/middle/borrowck.rs @@ -238,11 +238,13 @@ enum ptr_kind {uniq_ptr, gc_ptr, region_ptr, unsafe_ptr} // I am coining the term "components" to mean "pieces of a data // structure accessible without a dereference": enum comp_kind { - comp_tuple, comp_res, comp_variant, - comp_field(str, // name of field + comp_tuple, // elt in a tuple + comp_res, // data for a resource + comp_variant(ast::def_id), // internals to a variant of given enum + comp_field(str, // name of field ast::mutability), // declared mutability of field - comp_index(ty::t, // type of vec/str/etc being deref'd - ast::mutability) // mutability of vec content + comp_index(ty::t, // type of vec/str/etc being deref'd + ast::mutability) // mutability of vec content } // We pun on *T to mean both actual deref of a ptr as well @@ -411,7 +413,7 @@ impl to_str_methods for borrowck_ctxt { comp_index(*) { "[]" } comp_tuple { "()" } comp_res { "" } - 
comp_variant { "" } + comp_variant(_) { "" } } } @@ -468,7 +470,7 @@ impl to_str_methods for borrowck_ctxt { cat_comp(_, comp_field(*)) { mut_str + " field" } cat_comp(_, comp_tuple) { "tuple content" } cat_comp(_, comp_res) { "resource content" } - cat_comp(_, comp_variant) { "enum content" } + cat_comp(_, comp_variant(_)) { "enum content" } cat_comp(_, comp_index(t, _)) { alt ty::get(t).struct { ty::ty_vec(*) | ty::ty_evec(*) { @@ -514,7 +516,7 @@ impl to_str_methods for borrowck_ctxt { // mutable structure. fn inherent_mutability(ck: comp_kind) -> mutability { alt ck { - comp_tuple | comp_res | comp_variant {m_imm} + comp_tuple | comp_res | comp_variant(_) {m_imm} comp_field(_, m) | comp_index(_, m) {m} } } \ No newline at end of file diff --git a/src/rustc/middle/borrowck/categorization.rs b/src/rustc/middle/borrowck/categorization.rs index ff9d51196c91b..db22f880941b6 100644 --- a/src/rustc/middle/borrowck/categorization.rs +++ b/src/rustc/middle/borrowck/categorization.rs @@ -67,8 +67,8 @@ fn opt_deref_kind(t: ty::t) -> option { some(deref_ptr(unsafe_ptr)) } - ty::ty_enum(*) { - some(deref_comp(comp_variant)) + ty::ty_enum(did, _) { + some(deref_comp(comp_variant(did))) } ty::ty_res(*) { @@ -275,10 +275,12 @@ impl public_methods for borrowck_ctxt { } } - fn cat_variant(arg: N, cmt: cmt) -> cmt { + fn cat_variant(arg: N, + enum_did: ast::def_id, + cmt: cmt) -> cmt { @{id: arg.id(), span: arg.span(), - cat: cat_comp(cmt, comp_variant), - lp: cmt.lp.map { |l| @lp_comp(l, comp_variant) }, + cat: cat_comp(cmt, comp_variant(enum_did)), + lp: cmt.lp.map { |l| @lp_comp(l, comp_variant(enum_did)) }, mutbl: cmt.mutbl, // imm iff in an immutable context ty: self.tcx.ty(arg)} } diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index ed261b09b73d4..87980bf724817 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -338,8 +338,16 @@ impl methods for gather_loan_ctxt { } ast::pat_enum(_, some(subpats)) { // variant(x, y, z) + let enum_did = alt self.bccx.tcx.def_map +.find(pat.id) { + some(ast::def_variant(enum_did, _)) {enum_did} + e {tcx.sess.span_bug(pat.span, + #fmt["resolved to %?, \ + not variant", e])} + }; + for subpats.each { |subpat| - let subcmt = self.bccx.cat_variant(subpat, cmt); + let subcmt = self.bccx.cat_variant(subpat, enum_did, cmt); self.gather_pat(subcmt, subpat, arm_id, alt_id); } } diff --git a/src/rustc/middle/borrowck/loan.rs b/src/rustc/middle/borrowck/loan.rs index c0e6f4accf707..ee61dd9f0cf68 100644 --- a/src/rustc/middle/borrowck/loan.rs +++ b/src/rustc/middle/borrowck/loan.rs @@ -65,24 +65,23 @@ impl loan_methods for loan_ctxt { // that case, it must also be embedded in an immutable // location, or else the whole structure could be // overwritten and the component along with it. - let base_mutbl = alt req_mutbl { - m_imm { m_imm } - m_const | m_mutbl { m_const } - }; - - self.loan(cmt_base, base_mutbl); - self.ok_with_loan_of(cmt, req_mutbl) + self.loan_stable_comp(cmt, cmt_base, req_mutbl) } - cat_comp(cmt1, comp_variant) | - cat_deref(cmt1, _, uniq_ptr) { - // Variant components: the base must be immutable, because - // if it is overwritten, the types of the embedded data - // could change. - // - // Unique pointers: the base must be immutable, because if - // it is overwritten, the unique content will be freed. 
- self.loan(cmt1, m_imm); - self.ok_with_loan_of(cmt, req_mutbl) + cat_comp(cmt_base, comp_variant(enum_did)) { + // For enums, the memory is unstable if there are multiple + // variants, because if the enum value is overwritten then + // the memory changes type. + if ty::enum_is_univariant(self.bccx.tcx, enum_did) { + self.loan_stable_comp(cmt, cmt_base, req_mutbl) + } else { + self.loan_unstable_deref(cmt, cmt_base, req_mutbl) + } + } + cat_deref(cmt_base, _, uniq_ptr) { + // For unique pointers, the memory being pointed out is + // unstable because if the unique pointer is overwritten + // then the memory is freed. + self.loan_unstable_deref(cmt, cmt_base, req_mutbl) } cat_deref(cmt1, _, unsafe_ptr) | cat_deref(cmt1, _, gc_ptr) | @@ -94,4 +93,32 @@ impl loan_methods for loan_ctxt { } } } + + // A "stable component" is one where assigning the base of the + // component cannot cause the component itself to change types. + // Example: record fields. + fn loan_stable_comp(cmt: cmt, + cmt_base: cmt, + req_mutbl: ast::mutability) { + let base_mutbl = alt req_mutbl { + m_imm { m_imm } + m_const | m_mutbl { m_const } + }; + + self.loan(cmt_base, base_mutbl); + self.ok_with_loan_of(cmt, req_mutbl) + } + + // An "unstable deref" means a deref of a ptr/comp where, if the + // base of the deref is assigned to, pointers into the result of the + // deref would be invalidated. Examples: interior of variants, uniques. + fn loan_unstable_deref(cmt: cmt, + cmt_base: cmt, + req_mutbl: ast::mutability) { + // Variant components: the base must be immutable, because + // if it is overwritten, the types of the embedded data + // could change. + self.loan(cmt_base, m_imm); + self.ok_with_loan_of(cmt, req_mutbl) + } } diff --git a/src/rustc/middle/borrowck/preserve.rs b/src/rustc/middle/borrowck/preserve.rs index 441dfad9c2d50..98c9109fe8edf 100644 --- a/src/rustc/middle/borrowck/preserve.rs +++ b/src/rustc/middle/borrowck/preserve.rs @@ -17,7 +17,7 @@ impl public_methods for borrowck_ctxt { } cat_local(_) { // Normally, local variables are lendable, and so this - // case should never trigged. However, if we are + // case should never trigger. However, if we are // preserving an expression like a.b where the field `b` // has @ type, then it will recurse to ensure that the `a` // is stable to try and avoid rooting the value `a.b`. In @@ -43,10 +43,19 @@ impl public_methods for borrowck_ctxt { // type never changes. self.preserve(cmt_base, opt_scope_id) } - cat_comp(cmt_base, comp_variant) { - self.require_imm(cmt, cmt_base, opt_scope_id, err_mut_variant) + cat_comp(cmt_base, comp_variant(enum_did)) { + if ty::enum_is_univariant(self.tcx, enum_did) { + self.preserve(cmt_base, opt_scope_id) + } else { + // If there are multiple variants: overwriting the + // base could cause the type of this memory to change, + // so require imm. + self.require_imm(cmt, cmt_base, opt_scope_id, err_mut_variant) + } } cat_deref(cmt_base, _, uniq_ptr) { + // Overwriting the base could cause this memory to be + // freed, so require imm. 
self.require_imm(cmt, cmt_base, opt_scope_id, err_mut_uniq) } cat_deref(_, _, region_ptr) { diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index 44f26ad69470f..2fd4374dc093b 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -462,8 +462,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], // Unbox in case of a box field if any_box_pat(m, col) { let box = Load(bcx, val); - let box_ty = node_id_type(bcx, pat_id); - let box_no_addrspace = non_gc_box_cast(bcx, box, box_ty); + let box_no_addrspace = non_gc_box_cast(bcx, box); let unboxed = GEPi(bcx, box_no_addrspace, [0u, abi::box_field_body]); compile_submatch(bcx, enter_box(dm, m, col, val), [unboxed] + vals_left, chk, exits); @@ -472,8 +471,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef], if any_uniq_pat(m, col) { let box = Load(bcx, val); - let box_ty = node_id_type(bcx, pat_id); - let box_no_addrspace = non_gc_box_cast(bcx, box, box_ty); + let box_no_addrspace = non_gc_box_cast(bcx, box); let unboxed = GEPi(bcx, box_no_addrspace, [0u, abi::box_field_body]); compile_submatch(bcx, enter_uniq(dm, m, col, val), [unboxed] + vals_left, chk, exits); diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index 0721603076792..2423f04d11fed 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -50,6 +50,7 @@ import type_of::type_of; // Issue #1873 import syntax::ast_map::{path, path_mod, path_name}; import std::smallintmap; +import option::is_none; // Destinations @@ -380,10 +381,8 @@ fn malloc_raw(bcx: block, t: ty::t, heap: heap) -> ValueRef { fn malloc_general(bcx: block, t: ty::t, heap: heap) -> {box: ValueRef, body: ValueRef} { let _icx = bcx.insn_ctxt("malloc_general"); - let mk_ty = alt heap { heap_shared { ty::mk_imm_box } - heap_exchange { ty::mk_imm_uniq } }; let box = malloc_raw(bcx, t, heap); - let non_gc_box = non_gc_box_cast(bcx, box, mk_ty(bcx.tcx(), t)); + let non_gc_box = non_gc_box_cast(bcx, box); let body = GEPi(bcx, non_gc_box, [0u, abi::box_field_body]); ret {box: box, body: body}; } @@ -750,6 +749,12 @@ fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) { ty::ty_opaque_closure_ptr(ck) { closure::make_opaque_cbox_free_glue(bcx, ck, v) } + ty::ty_class(did,substs) { + // Call the dtor if there is one + option::map_default(ty::ty_dtor(bcx.tcx(), did), bcx) {|dt_id| + trans_class_drop(bcx, v, dt_id, did, substs) + } + } _ { bcx } }; build_return(bcx); @@ -1207,7 +1212,8 @@ fn lazily_emit_tydesc_glue(ccx: @crate_ctxt, field: uint, } } -fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef, +// See [Note-arg-mode] +fn call_tydesc_glue_full(++cx: block, v: ValueRef, tydesc: ValueRef, field: uint, static_ti: option<@tydesc_info>) { let _icx = cx.insn_ctxt("call_tydesc_glue_full"); lazily_emit_tydesc_glue(cx.ccx(), field, static_ti); @@ -1245,8 +1251,9 @@ fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef, C_null(T_ptr(T_ptr(cx.ccx().tydesc_type))), llrawptr]); } -fn call_tydesc_glue(cx: block, v: ValueRef, t: ty::t, field: uint) -> - block { +// See [Note-arg-mode] +fn call_tydesc_glue(++cx: block, v: ValueRef, t: ty::t, field: uint) + -> block { let _icx = cx.insn_ctxt("call_tydesc_glue"); let mut ti = none; let td = get_tydesc(cx.ccx(), t, ti); @@ -2286,7 +2293,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) } some(none) { fn_id } // Not inlinable none { // Not seen yet - alt check csearch::maybe_get_item_ast( + alt csearch::maybe_get_item_ast( 
ccx.tcx, fn_id, bind astencode::decode_inlined_item(_, _, ccx.maps, _, _)) { @@ -2326,6 +2333,10 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) trans_item(ccx, *item); local_def(my_id) } + csearch::found_parent(_, _) { + ccx.sess.bug("maybe_get_item_ast returned a found_parent \ + with a non-item parent"); + } csearch::found(ast::ii_method(impl_did, mth)) { ccx.external.insert(fn_id, some(mth.id)); let {bounds: impl_bnds, rp: _, ty: impl_ty} = @@ -2339,6 +2350,10 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) } local_def(mth.id) } + csearch::found(ast::ii_dtor(dtor, nm, tps, parent_id)) { + ccx.external.insert(fn_id, some(dtor.node.id)); + local_def(dtor.node.id) + } } } } @@ -2679,11 +2694,11 @@ fn trans_lval(cx: block, e: @ast::expr) -> lval_result { let t = expr_ty(cx, base); let val = alt check ty::get(t).struct { ty::ty_box(_) { - let non_gc_val = non_gc_box_cast(sub.bcx, sub.val, t); + let non_gc_val = non_gc_box_cast(sub.bcx, sub.val); GEPi(sub.bcx, non_gc_val, [0u, abi::box_field_body]) } ty::ty_uniq(_) { - let non_gc_val = non_gc_box_cast(sub.bcx, sub.val, t); + let non_gc_val = non_gc_box_cast(sub.bcx, sub.val); GEPi(sub.bcx, non_gc_val, [0u, abi::box_field_body]) } ty::ty_res(_, _, _) { @@ -2711,10 +2726,11 @@ Before taking a pointer to the inside of a box it should be cast into address space 0. Otherwise the resulting (non-box) pointer will be in the wrong address space and thus be the wrong type. "] -fn non_gc_box_cast(cx: block, val: ValueRef, t: ty::t) -> ValueRef { +fn non_gc_box_cast(cx: block, val: ValueRef) -> ValueRef { #debug("non_gc_box_cast"); add_comment(cx, "non_gc_box_cast"); - let non_gc_t = type_of_non_gc_box(cx.ccx(), t); + assert(llvm::LLVMGetPointerAddressSpace(val_ty(val)) as uint == 1u); + let non_gc_t = T_ptr(llvm::LLVMGetElementType(val_ty(val))); PointerCast(cx, val, non_gc_t) } @@ -3111,8 +3127,9 @@ fn body_contains_ret(body: ast::blk) -> bool { cx.found } +// See [Note-arg-mode] fn trans_call_inner( - in_cx: block, + ++in_cx: block, call_info: option, fn_expr_ty: ty::t, ret_ty: ty::t, @@ -3240,8 +3257,8 @@ fn need_invoke(bcx: block) -> bool { _ { } } cur = alt cur.parent { - parent_some(next) { next } - parent_none { ret false; } + some(next) { next } + none { ret false; } } } } @@ -3262,7 +3279,7 @@ fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) { loop { alt bcx.kind { block_scope(inf) { - if inf.cleanups.len() > 0u || bcx.parent == parent_none { + if inf.cleanups.len() > 0u || is_none(bcx.parent) { f(inf); ret; } } @@ -3471,11 +3488,11 @@ fn add_root_cleanup(bcx: block, scope_id: ast::node_id, some({id, _}) if id == scope_id { ret bcx_sid; } _ { alt bcx_sid.parent { - parent_none { + none { bcx.tcx().sess.bug( #fmt["no enclosing scope with id %d", scope_id]); } - parent_some(bcx_par) { bcx_par } + some(bcx_par) { bcx_par } } } } @@ -3785,7 +3802,10 @@ fn do_spill(bcx: block, v: ValueRef, t: ty::t) -> ValueRef { // Since this function does *not* root, it is the caller's responsibility to // ensure that the referent is pointed to by a root. -fn do_spill_noroot(cx: block, v: ValueRef) -> ValueRef { +// [Note-arg-mode] +// ++ mode is temporary, due to how borrowck treats enums. With hope, +// will go away anyway when we get rid of modes. 
+fn do_spill_noroot(++cx: block, v: ValueRef) -> ValueRef { let llptr = alloca(cx, val_ty(v)); Store(cx, v, llptr); ret llptr; @@ -3878,11 +3898,8 @@ fn trans_fail_expr(bcx: block, sp_opt: option, bcx = expr_res.bcx; if ty::type_is_str(e_ty) { - let unit_ty = ty::mk_mach_uint(tcx, ast::ty_u8); - let vec_ty = ty::mk_vec(tcx, {ty: unit_ty, mutbl: ast::m_imm}); - let unit_llty = type_of(ccx, unit_ty); - let body = tvec::get_bodyptr(bcx, expr_res.val, vec_ty); - let data = tvec::get_dataptr(bcx, body, unit_llty); + let body = tvec::get_bodyptr(bcx, expr_res.val); + let data = tvec::get_dataptr(bcx, body); ret trans_fail_value(bcx, sp_opt, data); } else if bcx.unreachable || ty::type_is_bot(e_ty) { ret bcx; @@ -3970,9 +3987,9 @@ fn trans_break_cont(bcx: block, to_end: bool) _ {} } unwind = alt unwind.parent { - parent_some(cx) { cx } + some(cx) { cx } // This is a return from a loop body block - parent_none { + none { Store(bcx, C_bool(!to_end), bcx.fcx.llretptr); cleanup_and_leave(bcx, none, some(bcx.fcx.llreturn)); Unreachable(bcx); @@ -4090,7 +4107,7 @@ fn trans_stmt(cx: block, s: ast::stmt) -> block { // You probably don't want to use this one. See the // next three functions instead. -fn new_block(cx: fn_ctxt, parent: block_parent, +kind: block_kind, +fn new_block(cx: fn_ctxt, parent: option, +kind: block_kind, name: str, opt_node_info: option) -> block { let s = if cx.ccx.sess.opts.save_temps || cx.ccx.sess.opts.debuginfo { @@ -4099,19 +4116,10 @@ fn new_block(cx: fn_ctxt, parent: block_parent, +kind: block_kind, let llbb: BasicBlockRef = str::as_c_str(s, {|buf| llvm::LLVMAppendBasicBlock(cx.llfn, buf) }); - let bcx = @{llbb: llbb, - mut terminated: false, - mut unreachable: false, - parent: parent, - kind: kind, - node_info: opt_node_info, - fcx: cx}; - alt parent { - parent_some(cx) { + let bcx = mk_block(llbb, parent, kind, opt_node_info, cx); + option::iter(parent) {|cx| if cx.unreachable { Unreachable(bcx); } - } - _ {} - } + }; ret bcx; } @@ -4122,20 +4130,20 @@ fn simple_block_scope() -> block_kind { // Use this when you're at the top block of a function or the like. fn top_scope_block(fcx: fn_ctxt, opt_node_info: option) -> block { - ret new_block(fcx, parent_none, simple_block_scope(), + ret new_block(fcx, none, simple_block_scope(), "function top level", opt_node_info); } fn scope_block(bcx: block, opt_node_info: option, n: str) -> block { - ret new_block(bcx.fcx, parent_some(bcx), simple_block_scope(), + ret new_block(bcx.fcx, some(bcx), simple_block_scope(), n, opt_node_info); } fn loop_scope_block(bcx: block, loop_break: block, n: str, opt_node_info: option) -> block { - ret new_block(bcx.fcx, parent_some(bcx), block_scope({ + ret new_block(bcx.fcx, some(bcx), block_scope({ loop_break: some(loop_break), mut cleanups: [], mut cleanup_paths: [], @@ -4146,17 +4154,11 @@ fn loop_scope_block(bcx: block, loop_break: block, n: str, // Use this when you're making a general CFG BB within a scope. 
fn sub_block(bcx: block, n: str) -> block { - ret new_block(bcx.fcx, parent_some(bcx), block_non_scope, n, none); + new_block(bcx.fcx, some(bcx), block_non_scope, n, none) } fn raw_block(fcx: fn_ctxt, llbb: BasicBlockRef) -> block { - ret @{llbb: llbb, - mut terminated: false, - mut unreachable: false, - parent: parent_none, - kind: block_non_scope, - node_info: none, - fcx: fcx}; + mk_block(llbb, none, block_non_scope, none, fcx) } @@ -4231,8 +4233,8 @@ fn cleanup_and_leave(bcx: block, upto: option, _ {} } cur = alt cur.parent { - parent_some(next) { next } - parent_none { assert option::is_none(upto); break; } + some(next) { next } + none { assert is_none(upto); break; } }; } alt leave { diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index 9dc8fdd4557c4..3bac52a949a0b 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -350,32 +350,44 @@ type node_info = { // code. Each basic block we generate is attached to a function, typically // with many basic blocks per function. All the basic blocks attached to a // function are organized as a directed graph. -type block = @{ +class block_ { // The BasicBlockRef returned from a call to // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic // block to the function pointed to by llfn. We insert // instructions into that block by way of this block context. // The block pointing to this one in the function's digraph. - llbb: BasicBlockRef, - mut terminated: bool, - mut unreachable: bool, - parent: block_parent, + let llbb: BasicBlockRef; + let mut terminated: bool; + let mut unreachable: bool; + let parent: option; // The 'kind' of basic block this is. - kind: block_kind, + let kind: block_kind; // info about the AST node this block originated from, if any - node_info: option, + let node_info: option; // The function context for the function to which this block is // attached. - fcx: fn_ctxt -}; + let fcx: fn_ctxt; + new(llbb: BasicBlockRef, parent: option, -kind: block_kind, + node_info: option, fcx: fn_ctxt) { + // sigh + self.llbb = llbb; self.terminated = false; self.unreachable = false; + self.parent = parent; self.kind = kind; self.node_info = node_info; + self.fcx = fcx; + } +} + +/* This must be enum and not type, or trans goes into an infinite loop (#2572) + */ +enum block = @block_; + +fn mk_block(llbb: BasicBlockRef, parent: option, -kind: block_kind, + node_info: option, fcx: fn_ctxt) -> block { + block(@block_(llbb, parent, kind, node_info, fcx)) +} // First two args are retptr, env const first_real_arg: uint = 2u; -// FIXME move blocks to a class once those are finished, and simply use -// option for this. 
(#2532) -enum block_parent { parent_none, parent_some(block), } - type result = {bcx: block, val: ValueRef}; type result_t = {bcx: block, val: ValueRef, ty: ty::t}; @@ -412,7 +424,11 @@ fn in_scope_cx(cx: block, f: fn(scope_info)) { } fn block_parent(cx: block) -> block { - alt check cx.parent { parent_some(b) { b } } + alt cx.parent { + some(b) { b } + none { cx.sess().bug(#fmt("block_parent called on root block %?", + cx)); } + } } // Accessors diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs index cd6ab2aa00a93..f45bb4d5011ea 100644 --- a/src/rustc/middle/trans/debuginfo.rs +++ b/src/rustc/middle/trans/debuginfo.rs @@ -236,8 +236,8 @@ fn create_block(cx: block) -> @metadata { let mut cx = cx; while option::is_none(cx.node_info) { alt cx.parent { - parent_some(b) { cx = b; } - parent_none { fail; } + some(b) { cx = b; } + none { fail; } } } let sp = option::get(cx.node_info).span; @@ -254,8 +254,8 @@ fn create_block(cx: block) -> @metadata { }*/ let parent = alt cx.parent { - parent_none { create_function(cx.fcx).node } - parent_some(bcx) { create_block(bcx).node } + none { create_function(cx.fcx).node } + some(bcx) { create_block(bcx).node } }; let file_node = create_file(cx.ccx(), fname); let unique_id = alt cache.find(LexicalBlockTag) { @@ -658,8 +658,8 @@ fn create_local_var(bcx: block, local: @ast::local) let tymd = create_ty(cx, ty, local.node.ty); let filemd = create_file(cx, loc.file.name); let context = alt bcx.parent { - parent_none { create_function(bcx.fcx).node } - parent_some(_) { create_block(bcx).node } + none { create_function(bcx.fcx).node } + some(_) { create_block(bcx).node } }; let mdnode = create_var(tg, context, name, filemd.node, loc.line as int, tymd.node); @@ -761,9 +761,10 @@ fn create_function(fcx: fn_ctxt) -> @metadata { (nm, decl.output, ctor_id) } ast_map::class_ctor(ctor,_) { - fcx.ccx.sess.span_bug(ctor.span, "create_function: \ - expected a resource ctor here"); } + // FIXME: output type may be wrong (#2194) + (nm, ctor.node.dec.output, ctor.node.id) } + } } ast_map::node_expr(expr) { alt expr.node { diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs index 25c60aa2793b8..2b70846787420 100644 --- a/src/rustc/middle/trans/reachable.rs +++ b/src/rustc/middle/trans/reachable.rs @@ -9,7 +9,9 @@ import syntax::ast::*; import syntax::{visit, ast_util, ast_map}; import syntax::ast_util::def_id_of_def; import syntax::attr; +import syntax::print::pprust::expr_to_str; import std::map::hashmap; +import driver::session::*; export map, find_reachable; @@ -58,7 +60,11 @@ fn traverse_export(cx: ctx, exp_id: node_id) { fn traverse_def_id(cx: ctx, did: def_id) { if did.crate != local_crate { ret; } - alt cx.tcx.items.get(did.node) { + let n = alt cx.tcx.items.find(did.node) { + none { ret; } // This can happen for self, for example + some(n) { n } + }; + alt n { ast_map::node_item(item, _) { traverse_public_item(cx, item); } ast_map::node_method(_, impl_id, _) { traverse_def_id(cx, impl_id); } ast_map::node_native_item(item, _, _) { cx.rmap.insert(item.id, ()); } @@ -111,6 +117,10 @@ fn traverse_public_item(cx: ctx, item: @item) { cx.rmap.insert(ctor.node.id, ()); option::iter(m_dtor) {|dtor| cx.rmap.insert(dtor.node.id, ()); + // dtors don't have attrs + if tps.len() > 0u { + traverse_inline_body(cx, dtor.node.body); + } } for vec::each(items) {|item| alt item.node { @@ -134,7 +144,13 @@ fn traverse_inline_body(cx: ctx, body: blk) { fn traverse_expr(e: @expr, cx: ctx, v: visit::vt) { alt e.node { 
expr_path(_) { - traverse_def_id(cx, def_id_of_def(cx.tcx.def_map.get(e.id))); + alt cx.tcx.def_map.find(e.id) { + some(d) { + traverse_def_id(cx, def_id_of_def(d)); + } + none { cx.tcx.sess.span_bug(e.span, #fmt("Unbound node \ + id %? while traversing %s", e.id, expr_to_str(e))); } + } } expr_field(_, _, _) { alt cx.method_map.find(e.id) { diff --git a/src/rustc/middle/trans/reflect.rs b/src/rustc/middle/trans/reflect.rs index 414693c628758..4b6e92aa5adaf 100644 --- a/src/rustc/middle/trans/reflect.rs +++ b/src/rustc/middle/trans/reflect.rs @@ -257,6 +257,7 @@ impl methods for reflector { }; self.visit("closure_ptr", [self.c_uint(ckval)]) } + ty::ty_unboxed_vec(mt) { self.bracketed_mt("vec", mt, []) } } } } diff --git a/src/rustc/middle/trans/shape.rs b/src/rustc/middle/trans/shape.rs index b875b2adc2fa0..2b21e3e954eaf 100644 --- a/src/rustc/middle/trans/shape.rs +++ b/src/rustc/middle/trans/shape.rs @@ -93,6 +93,7 @@ const shape_send_tydesc: u8 = 29u8; const shape_rptr: u8 = 31u8; const shape_fixedvec: u8 = 32u8; const shape_slice: u8 = 33u8; +const shape_unboxed_vec: u8 = 34u8; fn mk_global(ccx: @crate_ctxt, name: str, llval: ValueRef, internal: bool) -> ValueRef { @@ -225,6 +226,9 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { ty::ty_float(ast::ty_f64) { [shape_f64] } ty::ty_estr(ty::vstore_uniq) | ty::ty_str { + // FIXME: we want to emit this as a unique pointer to an unboxed vec, + // but it doesn't work at the moment, since trans doesn't put + // tydescs in string boxes... let mut s = [shape_vec]; add_bool(s, true); // type is POD let unit_ty = ty::mk_mach_uint(ccx.tcx, ast::ty_u8); @@ -265,13 +269,17 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t) -> [u8] { add_substr(s, shape_of(ccx, mt.ty)); s } - ty::ty_evec(mt, ty::vstore_uniq) | - ty::ty_vec(mt) { - let mut s = [shape_vec]; + ty::ty_unboxed_vec(mt) { + let mut s = [shape_unboxed_vec]; add_bool(s, ty::type_is_pod(ccx.tcx, mt.ty)); add_substr(s, shape_of(ccx, mt.ty)); s } + ty::ty_evec(mt, ty::vstore_uniq) | + ty::ty_vec(mt) { + shape_of(ccx, + ty::mk_imm_uniq(ccx.tcx, ty::mk_unboxed_vec(ccx.tcx, mt))) + } ty::ty_estr(ty::vstore_fixed(n)) { let mut s = [shape_fixedvec]; diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs index fa21927d74328..b220ec7cd42fe 100644 --- a/src/rustc/middle/trans/tvec.rs +++ b/src/rustc/middle/trans/tvec.rs @@ -5,11 +5,12 @@ import back::abi; import base::{call_memmove, INIT, copy_val, load_if_immediate, get_tydesc, sub_block, do_spill_noroot, - dest, bcx_icx}; + dest, bcx_icx, non_gc_box_cast}; import syntax::codemap::span; import shape::llsize_of; import build::*; import common::*; +import util::ppaux::ty_to_str; fn get_fill(bcx: block, vptr: ValueRef) -> ValueRef { let _icx = bcx.insn_ctxt("tvec::get_fill"); @@ -22,28 +23,14 @@ fn get_alloc(bcx: block, vptr: ValueRef) -> ValueRef { Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_alloc])) } -fn get_bodyptr(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> ValueRef { - let ccx = bcx.ccx(); - alt ty::get(vec_ty).struct { - ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) - | ty::ty_vec(_) | ty::ty_str { - let boxptr = PointerCast(bcx, vptr, T_ptr(T_box_header(ccx))); - let bodyptr = GEPi(bcx, boxptr, [1u]); - let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty); - let llunit_ty = type_of::type_of(ccx, unit_ty); - PointerCast(bcx, bodyptr, T_ptr(T_vec(ccx, llunit_ty))) - } - _ { - vptr - } - } +fn get_bodyptr(bcx: block, vptr: ValueRef) -> ValueRef { + non_gc_box_cast(bcx, GEPi(bcx, vptr, [0u, 
abi::box_field_body])) } -fn get_dataptr(bcx: block, vptr: ValueRef, unit_ty: TypeRef) +fn get_dataptr(bcx: block, vptr: ValueRef) -> ValueRef { let _icx = bcx.insn_ctxt("tvec::get_dataptr"); - let ptr = GEPi(bcx, vptr, [0u, abi::vec_elt_elems]); - PointerCast(bcx, ptr, T_ptr(unit_ty)) + GEPi(bcx, vptr, [0u, abi::vec_elt_elems, 0u]) } fn pointer_add(bcx: block, ptr: ValueRef, bytes: ValueRef) -> ValueRef { @@ -57,17 +44,14 @@ fn alloc_uniq_raw(bcx: block, unit_ty: ty::t, fill: ValueRef, alloc: ValueRef) -> result { let _icx = bcx.insn_ctxt("tvec::alloc_uniq_raw"); let ccx = bcx.ccx(); - let llunitty = type_of::type_of(ccx, unit_ty); - let llvecty = T_vec(ccx, llunitty); - let vecsize = Add(bcx, alloc, llsize_of(ccx, llvecty)); - let vecbodyty = unit_ty; // FIXME: This is not the correct type (#2536) + + let vecbodyty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty); + let vecsize = Add(bcx, alloc, llsize_of(ccx, ccx.opaque_vec_type)); + let {box, body} = base::malloc_unique_dyn(bcx, vecbodyty, vecsize); - let boxptr = PointerCast(bcx, box, - T_unique_ptr(T_unique(bcx.ccx(), llvecty))); - let bodyptr = PointerCast(bcx, body, T_ptr(llvecty)); - Store(bcx, fill, GEPi(bcx, bodyptr, [0u, abi::vec_elt_fill])); - Store(bcx, alloc, GEPi(bcx, bodyptr, [0u, abi::vec_elt_alloc])); - ret {bcx: bcx, val: boxptr}; + Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill])); + Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc])); + ret {bcx: bcx, val: box}; } fn alloc_uniq(bcx: block, unit_ty: ty::t, elts: uint) -> result { @@ -86,19 +70,14 @@ fn alloc_uniq(bcx: block, unit_ty: ty::t, elts: uint) -> result { fn duplicate_uniq(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> result { let _icx = bcx.insn_ctxt("tvec::duplicate_uniq"); let ccx = bcx.ccx(); - let body_ptr = get_bodyptr(bcx, vptr, vec_ty); + let body_ptr = get_bodyptr(bcx, vptr); let fill = get_fill(bcx, body_ptr); let size = Add(bcx, fill, llsize_of(ccx, ccx.opaque_vec_type)); let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty); - let llunitty = type_of::type_of(ccx, unit_ty); - let llvecty = T_vec(ccx, llunitty); - let vecbodyty = unit_ty; // FIXME: This is not the correct type (#2536) + let vecbodyty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty); let {box: newptr, body: new_body_ptr} = base::malloc_unique_dyn(bcx, vecbodyty, size); - let newptr = PointerCast(bcx, newptr, - T_unique_ptr(T_unique(bcx.ccx(), llvecty))); - let new_body_ptr = PointerCast(bcx, new_body_ptr, T_ptr(llvecty)); call_memmove(bcx, new_body_ptr, body_ptr, size); Store(bcx, fill, GEPi(bcx, new_body_ptr, [0u, abi::vec_elt_alloc])); @@ -167,8 +146,7 @@ fn trans_evec(bcx: block, args: [@ast::expr], ast::vstore_uniq { let {bcx, val} = alloc_uniq(bcx, unit_ty, args.len()); add_clean_free(bcx, val, true); - let body = get_bodyptr(bcx, val, vec_ty); - let dataptr = get_dataptr(bcx, body, llunitty); + let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val)); {bcx: bcx, val: val, dataptr: dataptr} } ast::vstore_box { @@ -249,10 +227,8 @@ fn get_base_and_len(cx: block, v: ValueRef, e_ty: ty::t) (base, len) } ty::vstore_uniq { - let body = tvec::get_bodyptr(cx, v, vec_ty); - let base = tvec::get_dataptr(cx, body, llunitty); - let len = tvec::get_fill(cx, body); - (base, len) + let body = tvec::get_bodyptr(cx, v); + (tvec::get_dataptr(cx, body), tvec::get_fill(cx, body)) } ty::vstore_box { cx.ccx().sess.unimpl("unhandled tvec::get_base_and_len"); @@ -305,14 +281,11 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef, let ccx = bcx.ccx(); let unit_ty = 
ty::sequence_element_type(ccx.tcx, vec_ty); let strings = ty::type_is_str(vec_ty); - let llunitty = type_of::type_of(ccx, unit_ty); let lhs = Load(bcx, lhsptr); let self_append = ICmp(bcx, lib::llvm::IntEQ, lhs, rhs); - let lbody = get_bodyptr(bcx, lhs, vec_ty); - let rbody = get_bodyptr(bcx, rhs, vec_ty); - let lfill = get_fill(bcx, lbody); - let rfill = get_fill(bcx, rbody); + let lfill = get_fill(bcx, get_bodyptr(bcx, lhs)); + let rfill = get_fill(bcx, get_bodyptr(bcx, rhs)); let mut new_fill = Add(bcx, lfill, rfill); if strings { new_fill = Sub(bcx, new_fill, C_int(ccx, 1)); } let opaque_lhs = PointerCast(bcx, lhsptr, @@ -323,9 +296,9 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef, let lhs = Load(bcx, lhsptr); let rhs = Select(bcx, self_append, lhs, rhs); - let lbody = get_bodyptr(bcx, lhs, vec_ty); + let lbody = get_bodyptr(bcx, lhs); - let lhs_data = get_dataptr(bcx, lbody, llunitty); + let lhs_data = get_dataptr(bcx, lbody); let mut lhs_off = lfill; if strings { lhs_off = Sub(bcx, lhs_off, C_int(ccx, 1)); } let write_ptr = pointer_add(bcx, lhs_data, lhs_off); @@ -350,7 +323,7 @@ fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t, let scratch = base::alloca(bcx, elt_llty); for vec::each(vals) {|val| bcx = base::trans_expr_save_in(bcx, val, scratch); - let vptr = get_bodyptr(bcx, Load(bcx, vptrptr), vec_ty); + let vptr = get_bodyptr(bcx, Load(bcx, vptrptr)); let old_fill = get_fill(bcx, vptr); let new_fill = Add(bcx, old_fill, elt_sz); let do_grow = ICmp(bcx, lib::llvm::IntUGT, new_fill, @@ -361,9 +334,9 @@ fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t, Call(bcx, ccx.upcalls.vec_grow, [pt, new_fill]); bcx }; - let vptr = get_bodyptr(bcx, Load(bcx, vptrptr), vec_ty); + let vptr = get_bodyptr(bcx, Load(bcx, vptrptr)); set_fill(bcx, vptr, new_fill); - let targetptr = pointer_add(bcx, get_dataptr(bcx, vptr, elt_llty), + let targetptr = pointer_add(bcx, get_dataptr(bcx, vptr), old_fill); call_memmove(bcx, targetptr, scratch, elt_sz); } @@ -387,18 +360,15 @@ fn trans_add(bcx: block, vec_ty: ty::t, lhs: ValueRef, ret base::store_in_dest(bcx, n, dest); } - let lhs_body = get_bodyptr(bcx, lhs, vec_ty); - let rhs_body = get_bodyptr(bcx, rhs, vec_ty); - - let lhs_fill = get_fill(bcx, lhs_body); - let rhs_fill = get_fill(bcx, rhs_body); + let lhs_fill = get_fill(bcx, get_bodyptr(bcx, lhs)); + let rhs_fill = get_fill(bcx, get_bodyptr(bcx, rhs)); let new_fill = Add(bcx, lhs_fill, rhs_fill); let mut {bcx: bcx, val: new_vec_ptr} = alloc_uniq_raw(bcx, unit_ty, new_fill, new_fill); - let new_vec_body_ptr = get_bodyptr(bcx, new_vec_ptr, vec_ty); + let new_vec_body_ptr = get_bodyptr(bcx, new_vec_ptr); let write_ptr_ptr = do_spill_noroot - (bcx, get_dataptr(bcx, new_vec_body_ptr, llunitty)); + (bcx, get_dataptr(bcx, new_vec_body_ptr)); let copy_fn = fn@(bcx: block, addr: ValueRef, _ty: ty::t) -> block { let ccx = bcx.ccx(); @@ -451,19 +421,14 @@ fn iter_vec_raw(bcx: block, data_ptr: ValueRef, vec_ty: ty::t, fn iter_vec_uniq(bcx: block, vptr: ValueRef, vec_ty: ty::t, fill: ValueRef, f: iter_vec_block) -> block { let _icx = bcx.insn_ctxt("tvec::iter_vec_uniq"); - let ccx = bcx.ccx(); - let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty); - let llunitty = type_of::type_of(ccx, unit_ty); - let body_ptr = get_bodyptr(bcx, vptr, vec_ty); - let data_ptr = get_dataptr(bcx, body_ptr, llunitty); + let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr)); iter_vec_raw(bcx, data_ptr, vec_ty, fill, f) } fn iter_vec(bcx: block, vptr: ValueRef, vec_ty: 
ty::t, f: iter_vec_block) -> block { let _icx = bcx.insn_ctxt("tvec::iter_vec"); - let body_ptr = get_bodyptr(bcx, vptr, vec_ty); - let fill = get_fill(bcx, body_ptr); + let fill = get_fill(bcx, get_bodyptr(bcx, vptr)); ret iter_vec_uniq(bcx, vptr, vec_ty, fill, f); } diff --git a/src/rustc/middle/trans/type_of.rs b/src/rustc/middle/trans/type_of.rs index a94a8116be03d..fb9112fc751ff 100644 --- a/src/rustc/middle/trans/type_of.rs +++ b/src/rustc/middle/trans/type_of.rs @@ -99,6 +99,9 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef { ty::ty_vec(mt) { T_unique_ptr(T_unique(cx, T_vec(cx, type_of(cx, mt.ty)))) } + ty::ty_unboxed_vec(mt) { + T_vec(cx, type_of(cx, mt.ty)) + } ty::ty_ptr(mt) { T_ptr(type_of(cx, mt.ty)) } ty::ty_rptr(_, mt) { T_ptr(type_of(cx, mt.ty)) } diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index bbbdafa6a7a05..8ffc0f36d2039 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -68,7 +68,7 @@ export sty; export subst, subst_tps, substs_is_noop, substs_to_str, substs; export t; export new_ty_hash; -export enum_variants, substd_enum_variants; +export enum_variants, substd_enum_variants, enum_is_univariant; export iface_methods, store_iface_methods, impl_iface; export enum_variant_with_id; export ty_dtor; @@ -88,6 +88,7 @@ export ty_str, mk_str, type_is_str; export ty_vec, mk_vec, type_is_vec; export ty_estr, mk_estr; export ty_evec, mk_evec; +export ty_unboxed_vec, mk_unboxed_vec, mk_mut_unboxed_vec; export vstore, vstore_fixed, vstore_uniq, vstore_box, vstore_slice; export ty_nil, mk_nil, type_is_nil; export ty_iface, mk_iface; @@ -378,6 +379,7 @@ enum sty { ty_type, // type_desc* ty_opaque_box, // used by monomorphizer to represent any @ box ty_opaque_closure_ptr(closure_kind), // ptr to env for fn, fn@, fn~ + ty_unboxed_vec(mt), } // In the middle end, constraints have a def_id attached, referring @@ -576,7 +578,8 @@ fn mk_t_with_id(cx: ctxt, st: sty, o_def_id: option) -> t { ty_enum(_, substs) | ty_class(_, substs) | ty_iface(_, substs) { flags |= sflags(substs); } - ty_box(m) | ty_uniq(m) | ty_vec(m) | ty_evec(m, _) | ty_ptr(m) { + ty_box(m) | ty_uniq(m) | ty_vec(m) | ty_evec(m, _) | + ty_ptr(m) | ty_unboxed_vec(m) { flags |= get(m.ty).flags; } ty_rptr(r, m) { @@ -671,6 +674,14 @@ fn mk_evec(cx: ctxt, tm: mt, t: vstore) -> t { mk_t(cx, ty_evec(tm, t)) } +fn mk_unboxed_vec(cx: ctxt, tm: mt) -> t { + mk_t(cx, ty_unboxed_vec(tm)) +} +fn mk_mut_unboxed_vec(cx: ctxt, ty: t) -> t { + mk_t(cx, ty_unboxed_vec({ty: ty, mutbl: ast::m_imm})) +} + + fn mk_rec(cx: ctxt, fs: [field]) -> t { mk_t(cx, ty_rec(fs)) } fn mk_constr(cx: ctxt, t: t, cs: [@type_constr]) -> t { @@ -752,7 +763,7 @@ fn maybe_walk_ty(ty: t, f: fn(t) -> bool) { ty_opaque_closure_ptr(_) | ty_var(_) | ty_var_integral(_) | ty_param(_, _) { } - ty_box(tm) | ty_vec(tm) | ty_evec(tm, _) | + ty_box(tm) | ty_vec(tm) | ty_evec(tm, _) | ty_unboxed_vec(tm) | ty_ptr(tm) | ty_rptr(_, tm) { maybe_walk_ty(tm.ty, f); } @@ -801,6 +812,9 @@ fn fold_sty(sty: sty, fldop: fn(t) -> t) -> sty { ty_vec(tm) { ty_vec({ty: fldop(tm.ty), mutbl: tm.mutbl}) } + ty_unboxed_vec(tm) { + ty_unboxed_vec({ty: fldop(tm.ty), mutbl: tm.mutbl}) + } ty_evec(tm, vst) { ty_evec({ty: fldop(tm.ty), mutbl: tm.mutbl}, vst) } @@ -1155,7 +1169,7 @@ pure fn type_is_unsafe_ptr(ty: t) -> bool { pure fn type_is_vec(ty: t) -> bool { ret alt get(ty).struct { - ty_vec(_) | ty_evec(_, _) { true } + ty_vec(_) | ty_evec(_, _) | ty_unboxed_vec(_) { true } ty_str | ty_estr(_) { true } _ { false } }; @@ -1593,7 +1607,7 @@ fn type_kind(cx: 
@@ -1593,7 +1607,7 @@ fn type_kind(cx: ctxt, ty: t) -> kind {
       ty_var(_) | ty_var_integral(_) {
         cx.sess.bug("Asked to compute kind of a type variable");
       }
-      ty_type | ty_opaque_closure_ptr(_) | ty_opaque_box {
+      ty_type | ty_opaque_closure_ptr(_) | ty_opaque_box | ty_unboxed_vec(_) {
         cx.sess.bug("Asked to compute kind of fictitious type");
       }
     };
@@ -1647,6 +1661,7 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
           ty_opaque_box |
           ty_opaque_closure_ptr(_) |
           ty_evec(_, _) |
+          ty_unboxed_vec(_) |
           ty_vec(_) {
             false
           }
@@ -2052,38 +2067,35 @@ fn hash_type_structure(st: sty) -> uint {
       ty_box(mt) { hash_subty(19u, mt.ty) }
       ty_evec(mt, _) { hash_subty(20u, mt.ty) }
       ty_vec(mt) { hash_subty(21u, mt.ty) }
+      ty_unboxed_vec(mt) { hash_subty(22u, mt.ty) }
+      ty_tup(ts) { hash_subtys(25u, ts) }
       ty_rec(fields) {
        let mut h = 26u;
        for fields.each {|f| h = hash_subty(h, f.mt.ty); }
        h
      }
-      ty_tup(ts) { hash_subtys(25u, ts) }
       ty_fn(f) {
        let mut h = 27u;
        for f.inputs.each {|a| h = hash_subty(h, a.ty); }
        hash_subty(h, f.output)
      }
+      ty_self { 28u }
       ty_var(v) { hash_uint(29u, v.to_uint()) }
       ty_var_integral(v) { hash_uint(30u, v.to_uint()) }
       ty_param(pid, did) { hash_def(hash_uint(31u, pid), did) }
-      ty_self { 28u }
       ty_type { 32u }
       ty_bot { 34u }
       ty_ptr(mt) { hash_subty(35u, mt.ty) }
-      ty_rptr(region, mt) {
-        let mut h = (46u << 2u) + hash_region(region);
-        hash_subty(h, mt.ty)
-      }
-      ty_res(did, sub, substs) {
-        let mut h = hash_subty(hash_def(18u, did), sub);
-        hash_substs(h, substs)
-      }
       ty_constr(t, cs) {
        let mut h = hash_subty(36u, t);
        for cs.each {|c| h = (h << 2u) + hash_type_constr(h, c); }
        h
      }
       ty_uniq(mt) { hash_subty(37u, mt.ty) }
+      ty_res(did, sub, substs) {
+        let mut h = hash_subty(hash_def(38u, did), sub);
+        hash_substs(h, substs)
+      }
       ty_iface(did, substs) {
        let mut h = hash_def(40u, did);
        hash_substs(h, substs)
@@ -2096,6 +2108,10 @@ fn hash_type_structure(st: sty) -> uint {
        let mut h = hash_def(45u, did);
        hash_substs(h, substs)
      }
+      ty_rptr(region, mt) {
+        let mut h = (46u << 2u) + hash_region(region);
+        hash_subty(h, mt.ty)
+      }
     }
 }
@@ -2411,6 +2427,7 @@ fn ty_sort_str(cx: ctxt, t: t) -> str {
       ty_box(_) { "@-ptr" }
       ty_uniq(_) { "~-ptr" }
       ty_evec(_, _) | ty_vec(_) { "vector" }
+      ty_unboxed_vec(_) { "unboxed vector" }
       ty_ptr(_) { "*-ptr" }
       ty_rptr(_, _) { "&-ptr" }
       ty_rec(_) { "record" }
@@ -2663,6 +2680,10 @@ fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path {
     }
 }
 
+fn enum_is_univariant(cx: ctxt, id: ast::def_id) -> bool {
+    vec::len(*enum_variants(cx, id)) == 1u
+}
+
 fn enum_variants(cx: ctxt, id: ast::def_id) -> @[variant_info] {
     alt cx.enum_var_cache.find(id) {
       some(variants) { ret variants; }
diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs
index c5c445702d517..3c60cb911fe4a 100644
--- a/src/rustc/util/ppaux.rs
+++ b/src/rustc/util/ppaux.rs
@@ -8,7 +8,7 @@ import middle::ty::{ty_estr, ty_evec, ty_float, ty_fn, ty_iface, ty_int};
 import middle::ty::{ty_nil, ty_opaque_box, ty_opaque_closure_ptr, ty_param};
 import middle::ty::{ty_ptr, ty_rec, ty_res, ty_rptr, ty_self, ty_str, ty_tup};
 import middle::ty::{ty_type, ty_uniq, ty_uint, ty_var, ty_var_integral};
-import middle::ty::{ty_vec, vid};
+import middle::ty::{ty_vec, ty_unboxed_vec, vid};
 import metadata::encoder;
 import syntax::codemap;
 import syntax::print::pprust;
@@ -186,6 +186,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
         }
       }
       ty_vec(tm) { "[" + mt_to_str(cx, tm) + "]" }
+      ty_unboxed_vec(tm) { "unboxed_vec<" + mt_to_str(cx, tm) + ">" }
       ty_type { "type" }
       ty_rec(elems) {
         let mut strs: [str] = [];
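Besides the unboxed-vector plumbing, ty.rs gains enum_is_univariant, a one-line check that an enum has exactly one variant so later passes (borrowck in particular, per the new test below) can treat newtype-style enums specially; ppaux learns to print the new type as unboxed_vec<...>. A hedged modern-Rust sketch of the same univariance check over an invented variant table:

    // Illustrative sketch of the enum_is_univariant idea: a newtype-style enum
    // has exactly one variant, so a match on it can never fail. The "variant
    // table" here is just a map from enum name to variant names; all invented.
    use std::collections::HashMap;

    fn enum_is_univariant(variants: &HashMap<&str, Vec<&str>>, name: &str) -> bool {
        variants.get(name).map_or(false, |vs| vs.len() == 1)
    }

    fn main() {
        let mut variants = HashMap::new();
        variants.insert("newtype", vec!["newtype"]);     // univariant: a newtype
        variants.insert("option", vec!["none", "some"]); // not univariant
        assert!(enum_is_univariant(&variants, "newtype"));
        assert!(!enum_is_univariant(&variants, "option"));
    }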
diff --git a/src/test/auxiliary/test_comm.rs b/src/test/auxiliary/test_comm.rs
new file mode 100644
index 0000000000000..48083fdf3f4fd
--- /dev/null
+++ b/src/test/auxiliary/test_comm.rs
@@ -0,0 +1,108 @@
+/*
+  Minimized version of core::comm (with still-local modifications
+  to turn a resource into a class) for testing.
+
+  Could probably be more minimal.
+ */
+
+import libc::size_t;
+
+export port::{};
+export port;
+export recv;
+
+
+#[doc = "
+A communication endpoint that can receive messages
+
+Each port has a unique per-task identity and may not be replicated or
+transmitted. If a port value is copied, both copies refer to the same
+port. Ports may be associated with multiple `chan`s.
+"]
+enum port<T: send> {
+    port_t(@port_ptr<T>)
+}
+
+#[doc = "Constructs a port"]
+fn port<T: send>() -> port<T> {
+    port_t(@port_ptr(rustrt::new_port(sys::size_of::<T>() as size_t)))
+}
+
+class port_ptr<T: send> {
+  let po: *rust_port;
+  new(po: *rust_port) {
+    #debug("in the port_ptr constructor");
+    self.po = po; }
+  drop unsafe {
+    #debug("in the port_ptr destructor");
+    task::unkillable {||
+        let yield = 0u;
+        let yieldp = ptr::addr_of(yield);
+        rustrt::rust_port_begin_detach(self.po, yieldp);
+        if yield != 0u {
+            task::yield();
+        }
+        rustrt::rust_port_end_detach(self.po);
+
+        while rustrt::rust_port_size(self.po) > 0u as size_t {
+            recv_::<T>(self.po);
+        }
+        rustrt::del_port(self.po);
+    }
+  }
+}
+
+
+#[doc = "
+Receive from a port. If no data is available on the port then the
+task will block until data becomes available.
+"]
+fn recv<T: send>(p: port<T>) -> T { recv_((**p).po) }
+
+
+#[doc = "Receive on a raw port pointer"]
+fn recv_<T: send>(p: *rust_port) -> T {
+    let yield = 0u;
+    let yieldp = ptr::addr_of(yield);
+    let mut res;
+    res = rusti::init::<T>();
+    rustrt::port_recv(ptr::addr_of(res) as *uint, p, yieldp);
+
+    if yield != 0u {
+        // Data isn't available yet, so res has not been initialized.
+        task::yield();
+    } else {
+        // In the absence of compiler-generated preemption points
+        // this is a good place to yield
+        task::yield();
+    }
+    ret res;
+}
+
+
+/* Implementation details */
+
+
+enum rust_port {}
+
+type port_id = int;
+
+#[abi = "cdecl"]
+native mod rustrt {
+
+    fn new_port(unit_sz: libc::size_t) -> *rust_port;
+    fn del_port(po: *rust_port);
+    fn rust_port_begin_detach(po: *rust_port,
+                              yield: *libc::uintptr_t);
+    fn rust_port_end_detach(po: *rust_port);
+    fn rust_port_size(po: *rust_port) -> libc::size_t;
+    fn port_recv(dptr: *uint, po: *rust_port,
+                 yield: *libc::uintptr_t);
+}
+
+#[abi = "rust-intrinsic"]
+native mod rusti {
+    fn init<T>() -> T;
+}
+
+
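test_comm.rs rewrites core::comm's port resource as a class whose destructor detaches the port, drains any undelivered messages, and frees it. The same RAII shape in modern Rust, with std::sync::mpsc standing in for the old rust_port runtime calls (purely illustrative; none of these names are the test's actual API):

    // A rough modern-Rust sketch of the destructor's job: drain whatever is
    // still queued and release the resource when the wrapper goes out of scope.
    use std::sync::mpsc::{channel, Receiver};

    struct PortPtr<T> {
        rx: Receiver<T>,
    }

    impl<T> Drop for PortPtr<T> {
        fn drop(&mut self) {
            // Mirror the drop block in test_comm.rs: pull out any messages
            // that were never received so they are not leaked.
            while let Ok(_msg) = self.rx.try_recv() {}
        }
    }

    fn main() {
        let (tx, rx) = channel();
        tx.send(42).unwrap();
        let _port = PortPtr { rx };
        // _port is dropped here; the pending message is drained in Drop.
    }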
diff --git a/src/test/compile-fail/empty-vec-trailing-comma.rs b/src/test/compile-fail/empty-vec-trailing-comma.rs
new file mode 100644
index 0000000000000..e7bb6b704a194
--- /dev/null
+++ b/src/test/compile-fail/empty-vec-trailing-comma.rs
@@ -0,0 +1,3 @@
+fn main() {
+    let v = [,]; //! ERROR unexpected token: ','
+}
diff --git a/src/test/run-pass/borrowck-newtype-issue-2573.rs b/src/test/run-pass/borrowck-newtype-issue-2573.rs
new file mode 100644
index 0000000000000..5ca39da8cb3c2
--- /dev/null
+++ b/src/test/run-pass/borrowck-newtype-issue-2573.rs
@@ -0,0 +1,16 @@
+enum foo = {mut bar: baz};
+
+enum baz = @{mut baz: int};
+
+impl quuux for foo {
+    fn frob() {
+        really_impure(self.bar);
+    }
+}
+
+// Override default mode so that we are passing by value
+fn really_impure(++bar: baz) {
+    bar.baz = 3;
+}
+
+fn main() {}
\ No newline at end of file
diff --git a/src/test/run-pass/borrowck-univariant-enum.rs b/src/test/run-pass/borrowck-univariant-enum.rs
new file mode 100644
index 0000000000000..3b01c4aa076bc
--- /dev/null
+++ b/src/test/run-pass/borrowck-univariant-enum.rs
@@ -0,0 +1,19 @@
+enum newtype {
+    newtype(int)
+}
+
+fn main() {
+
+    // Test that borrowck treats enums with a single variant
+    // specially.
+
+    let x = @mut 5;
+    let y = @mut newtype(3);
+    let z = alt *y {
+      newtype(b) {
+        *x += 1;
+        *x * b
+      }
+    };
+    assert z == 18;
+}
\ No newline at end of file
diff --git a/src/test/run-pass/explicit-i-suffix.rs b/src/test/run-pass/explicit-i-suffix.rs
new file mode 100644
index 0000000000000..71b7c84410203
--- /dev/null
+++ b/src/test/run-pass/explicit-i-suffix.rs
@@ -0,0 +1,9 @@
+fn main() {
+    let x: int = 8i;
+    let y = 9i;
+    x + y;
+
+    let q: int = -8i;
+    let r = -9i;
+    q + r;
+}
diff --git a/src/test/run-pass/leaky_comm.rs b/src/test/run-pass/leaky_comm.rs
new file mode 100644
index 0000000000000..fe4f44caf08ec
--- /dev/null
+++ b/src/test/run-pass/leaky_comm.rs
@@ -0,0 +1,19 @@
+// xfail-fast
+// aux-build:test_comm.rs
+
+use test_comm;
+
+fn main() {
+    let p = test_comm::port();
+
+    alt none::<int> {
+      none {}
+      some(_) {
+        if test_comm::recv(p) == 0 {
+            #error("floop");
+        }
+        else {
+            #error("bloop");
+        }
+    }}
+}
\ No newline at end of file
diff --git a/src/test/run-pass/log-str.rs b/src/test/run-pass/log-str.rs
index 0fb989aa30855..80157c0e9388c 100644
--- a/src/test/run-pass/log-str.rs
+++ b/src/test/run-pass/log-str.rs
@@ -1,4 +1,4 @@
 fn main() {
-    assert "[1, 2, 3]" == sys::log_str([1, 2, 3]);
-    assert #fmt["%?/%5?", [1, 2, 3], "hi"] == "[1, 2, 3]/ \"hi\"";
+    assert "~[1, 2, 3]" == sys::log_str([1, 2, 3]);
+    assert #fmt["%?/%5?", [1, 2, 3], "hi"] == "~[1, 2, 3]/ \"hi\"";
 }
diff --git a/src/test/run-pass/vec-trailing-comma.rs b/src/test/run-pass/vec-trailing-comma.rs
new file mode 100644
index 0000000000000..473ffe9f11d69
--- /dev/null
+++ b/src/test/run-pass/vec-trailing-comma.rs
@@ -0,0 +1,10 @@
+// Issue #2482.
+
+fn main() {
+    let v1: [int] = [10, 20, 30,];
+    let v2: [int] = [10, 20, 30];
+    assert (v1[2] == v2[2]);
+    let v3: [int] = [10,];
+    let v4: [int] = [10];
+    assert (v3[0] == v4[0]);
+}
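For reference, the arithmetic that borrowck-univariant-enum.rs asserts: x starts at 5, the single match arm bumps it to 6, and 6 * 3 gives the expected 18. A modern-Rust restatement that can be run to check it, with RefCell standing in for the old @mut boxes (illustrative only, not part of the patch):

    // Restatement of borrowck-univariant-enum.rs; the arithmetic is the same:
    // x goes 5 -> 6 inside the arm, then 6 * 3 == 18.
    use std::cell::RefCell;

    enum Newtype {
        Newtype(i32),
    }

    fn main() {
        let x = RefCell::new(5);
        let y = Newtype::Newtype(3);
        let z = match y {
            Newtype::Newtype(b) => {
                *x.borrow_mut() += 1;
                *x.borrow() * b
            }
        };
        assert_eq!(z, 18);
    }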