Upgrade to rustc 1.0.0-nightly (c89de2c56 2015-03-28) (built 2015-03-29) #116

Closed · wants to merge 2 commits
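Most of the churn in this diff swaps the removed as_slice() conversions for slicing syntax, deref coercions, and From-based constructors. A minimal sketch of those replacement idioms follows; it is an illustration only, not code from this PR, and the values are made up:

// Illustration only (not part of this PR): the idioms the diff migrates to.
fn main() {
    let s = String::from("&amp;");

    // was: s.as_slice()             now: &s[..], or plain &s where a &str is expected
    let slice: &str = &s[..];
    assert_eq!(slice, "&amp;");

    // was: s.as_slice().char_at(0)  stable alternative: s.chars().next()
    assert_eq!(s.chars().next(), Some('&'));

    // was: PathBuf::new(some_str)   now: From::from(some_str) / PathBuf::from(some_str)
    let path: std::path::PathBuf = From::from("../data/bench/");
    assert!(path.ends_with("bench"));
}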
4 changes: 2 additions & 2 deletions examples/noop-tokenize.rs
@@ -9,7 +9,7 @@

// Run a single benchmark once. For use with profiling tools.

-#![feature(core, test)]
+#![feature(test)]

extern crate test;
extern crate html5ever;
@@ -36,7 +36,7 @@ impl TokenSink for Sink {
fn main() {
let mut path = env::current_exe().unwrap();
path.push("../data/bench/");
-path.push(env::args().nth(1).unwrap().as_slice());
+path.push(env::args().nth(1).unwrap());

let mut file = fs::File::open(&path).unwrap();
let mut file_input = String::new();
4 changes: 2 additions & 2 deletions examples/tokenize.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

-#![feature(core, collections)]
+#![feature(collections)]

extern crate html5ever;

@@ -43,7 +43,7 @@ impl TokenSink for TokenPrinter {
fn process_token(&mut self, token: Token) {
match token {
CharacterTokens(b) => {
-for c in b.as_slice().chars() {
+for c in b.chars() {
self.do_char(c);
}
}
4 changes: 2 additions & 2 deletions macros/src/lib.rs
@@ -11,12 +11,12 @@
#![crate_type="dylib"]

#![feature(plugin_registrar, quote)]
-#![feature(rustc_private, core, std_misc)]
+#![feature(rustc_private, convert, slice_patterns)]
#![deny(warnings)]

extern crate syntax;
extern crate rustc;
-extern crate "rustc-serialize" as rustc_serialize;
+extern crate rustc_serialize;

#[macro_use]
extern crate mac;
7 changes: 4 additions & 3 deletions macros/src/named_entities.rs
@@ -13,6 +13,7 @@ use std::path::PathBuf;
use std::fs;
use std::str::FromStr;
use std::collections::HashMap;
+use std::convert::From;

use rustc_serialize::json;
use rustc_serialize::json::Json;
@@ -52,7 +53,7 @@ fn build_map(js: Json) -> Option<HashMap<String, [u32; 2]>> {
}

// Slice off the initial '&'
-assert!(k.as_slice().char_at(0) == '&');
+assert!(k.chars().next() == Some('&'));
map.insert(k[1..].to_string(), codepoint_pair);
}

@@ -95,7 +96,7 @@ pub fn expand(cx: &mut ExtCtxt, sp: Span, tt: &[TokenTree]) -> Box<MacResult+'st
}, "unexpected result from file!()");

// Combine those to get an absolute path to entities.json.
-let mut path = PathBuf::new(&mod_filename);
+let mut path: PathBuf = From::from(&mod_filename);
path.pop();
path.push(&json_filename);

@@ -111,7 +112,7 @@ pub fn expand(cx: &mut ExtCtxt, sp: Span, tt: &[TokenTree]) -> Box<MacResult+'st
//
// phf_map!(k => v, k => v, ...)
let toks: Vec<_> = map.into_iter().flat_map(|(k, [c0, c1])| {
-let k = k.as_slice();
+let k = &k[..];
(quote_tokens!(&mut *cx, $k => [$c0, $c1],)).into_iter()
}).collect();
MacEager::expr(quote_expr!(&mut *cx, phf_map!($toks)))
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -10,7 +10,7 @@
#![crate_name="html5ever"]
#![crate_type="dylib"]

-#![feature(plugin, box_syntax, no_std, core, collections, alloc)]
+#![feature(plugin, box_syntax, no_std, core, collections, alloc, str_char, slice_patterns)]
#![deny(warnings)]
#![allow(unused_parens)]

12 changes: 6 additions & 6 deletions src/sink/owned_dom.rs
@@ -223,7 +223,7 @@ impl TreeSink for Sink {
// Append to an existing Text node if we have one.
match child {
AppendText(ref text) => match parent.children.last() {
-Some(h) => if append_to_existing_text(*h, text.as_slice()) { return; },
+Some(h) => if append_to_existing_text(*h, &text) { return; },
_ => (),
},
_ => (),
@@ -247,7 +247,7 @@ impl TreeSink for Sink {
// Look for a text node before the insertion point.
(AppendText(text), i) => {
let prev = parent.children[i-1];
-if append_to_existing_text(prev, text.as_slice()) {
+if append_to_existing_text(prev, &text) {
return Ok(());
}
self.new_node(Text(text))
@@ -363,7 +363,7 @@ impl Serializable for Node {
(_, &Element(ref name, ref attrs)) => {
if traversal_scope == IncludeNode {
try!(serializer.start_elem(name.clone(),
-attrs.iter().map(|at| (&at.name, at.value.as_slice()))));
+attrs.iter().map(|at| (&at.name, &at.value[..]))));
}

for child in self.children.iter() {
@@ -385,9 +385,9 @@

(ChildrenOnly, _) => Ok(()),

-(IncludeNode, &Doctype(ref name, _, _)) => serializer.write_doctype(name.as_slice()),
-(IncludeNode, &Text(ref text)) => serializer.write_text(text.as_slice()),
-(IncludeNode, &Comment(ref text)) => serializer.write_comment(text.as_slice()),
+(IncludeNode, &Doctype(ref name, _, _)) => serializer.write_doctype(&name),
+(IncludeNode, &Text(ref text)) => serializer.write_text(&text),
+(IncludeNode, &Comment(ref text)) => serializer.write_comment(&text),

(IncludeNode, &Document) => panic!("Can't serialize Document node itself"),
}
13 changes: 7 additions & 6 deletions src/sink/rcdom.rs
@@ -64,6 +64,7 @@ pub type Handle = Rc<RefCell<Node>>;
/// Weak reference to a DOM node, used for parent pointers.
pub type WeakHandle = Weak<RefCell<Node>>;

+#[allow(trivial_casts)]
fn same_node(x: &Handle, y: &Handle) -> bool {
// FIXME: This shouldn't really need to touch the borrow flags, right?
(&*x.borrow() as *const Node) == (&*y.borrow() as *const Node)
@@ -164,7 +165,7 @@ impl TreeSink for RcDom {
// Append to an existing Text node if we have one.
match child {
AppendText(ref text) => match parent.borrow().children.last() {
-Some(h) => if append_to_existing_text(h, text.as_slice()) { return; },
+Some(h) => if append_to_existing_text(h, &text) { return; },
_ => (),
},
_ => (),
@@ -189,7 +190,7 @@ impl TreeSink for RcDom {
(AppendText(text), i) => {
let parent = parent.borrow();
let prev = &parent.children[i-1];
-if append_to_existing_text(prev, text.as_slice()) {
+if append_to_existing_text(prev, &text) {
return Ok(());
}
new_node(Text(text))
@@ -276,7 +277,7 @@ impl Serializable for Handle {
(_, &Element(ref name, ref attrs)) => {
if traversal_scope == IncludeNode {
try!(serializer.start_elem(name.clone(),
-attrs.iter().map(|at| (&at.name, at.value.as_slice()))));
+attrs.iter().map(|at| (&at.name, &at.value[..]))));
}

for handle in node.children.iter() {
@@ -298,9 +299,9 @@

(ChildrenOnly, _) => Ok(()),

-(IncludeNode, &Doctype(ref name, _, _)) => serializer.write_doctype(name.as_slice()),
-(IncludeNode, &Text(ref text)) => serializer.write_text(text.as_slice()),
-(IncludeNode, &Comment(ref text)) => serializer.write_comment(text.as_slice()),
+(IncludeNode, &Doctype(ref name, _, _)) => serializer.write_doctype(&name),
+(IncludeNode, &Text(ref text)) => serializer.write_text(&text),
+(IncludeNode, &Comment(ref text)) => serializer.write_comment(&text),

(IncludeNode, &Document) => panic!("Can't serialize Document node itself"),
}
8 changes: 4 additions & 4 deletions src/tokenizer/buffer_queue.rs
@@ -74,7 +74,7 @@ impl BufferQueue {
/// Look at the next available character, if any.
pub fn peek(&mut self) -> Option<char> {
match self.buffers.front() {
-Some(&Buffer { pos, ref buf }) => Some(buf.as_slice().char_at(pos)),
+Some(&Buffer { pos, ref buf }) => Some(buf.char_at(pos)),
None => None,
}
}
@@ -84,7 +84,7 @@ impl BufferQueue {
let (result, now_empty) = match self.buffers.front_mut() {
None => (None, false),
Some(&mut Buffer { ref mut pos, ref buf }) => {
-let CharRange { ch, next } = buf.as_slice().char_range_at(*pos);
+let CharRange { ch, next } = buf.char_range_at(*pos);
*pos = next;
(Some(ch), next >= buf.len())
}
@@ -111,7 +111,7 @@ impl BufferQueue {
*pos = new_pos;
(Some(NotFromSet(out)), new_pos >= buf.len())
} else {
-let CharRange { ch, next } = buf.as_slice().char_range_at(*pos);
+let CharRange { ch, next } = buf.char_range_at(*pos);
*pos = next;
(Some(FromSet(ch)), next >= buf.len())
}
@@ -146,7 +146,7 @@ impl BufferQueue {
}
let ref buf = self.buffers[buffers_exhausted];

-let d = buf.buf.as_slice().char_at(consumed_from_last);
+let d = buf.buf.char_at(consumed_from_last);
match (c.to_ascii_opt(), d.to_ascii_opt()) {
(Some(c), Some(d)) => if c.eq_ignore_case(d) { () } else { return Some(false) },
_ => return Some(false),
12 changes: 6 additions & 6 deletions src/tokenizer/char_ref/mod.rs
@@ -172,7 +172,7 @@ impl CharRefTokenizer {
use std::num::wrapping::WrappingOps;

let c = unwrap_or_return!(tokenizer.peek(), Stuck);
-match c.to_digit(base as u32) {
+match c.to_digit(base) {
Some(n) => {
tokenizer.discard_char();
self.num = self.num.wrapping_mul(base);
@@ -181,7 +181,7 @@ impl CharRefTokenizer {
// We still parse digits and semicolon, but don't use the result.
self.num_too_big = true;
}
-self.num = self.num.wrapping_add(n as u32);
+self.num = self.num.wrapping_add(n);
self.seen_digit = true;
Progress
}
@@ -251,7 +251,7 @@ impl CharRefTokenizer {
fn do_named<Sink: TokenSink>(&mut self, tokenizer: &mut Tokenizer<Sink>) -> Status {
let c = unwrap_or_return!(tokenizer.get_char(), Stuck);
self.name_buf_mut().push(c);
-match data::NAMED_ENTITIES.get(self.name_buf().as_slice()) {
+match data::NAMED_ENTITIES.get(&self.name_buf()[..]) {
// We have either a full match or a prefix of one.
Some(m) => {
if m[0] != 0 {
@@ -271,7 +271,7 @@ impl CharRefTokenizer {
fn emit_name_error<Sink: TokenSink>(&mut self, tokenizer: &mut Tokenizer<Sink>) {
let msg = format_if!(tokenizer.opts.exact_errors,
"Invalid character reference",
-"Invalid character reference &{}", self.name_buf().as_slice());
+"Invalid character reference &{}", self.name_buf());
tokenizer.emit_error(msg);
}

@@ -313,14 +313,14 @@ impl CharRefTokenizer {

let name_len = self.name_len;
assert!(name_len > 0);
-let last_matched = self.name_buf().as_slice().char_at(name_len-1);
+let last_matched = self.name_buf().char_at(name_len-1);

// There might not be a next character after the match, if
// we had a full match and then hit EOF.
let next_after = if name_len == self.name_buf().len() {
None
} else {
-Some(self.name_buf().as_slice().char_at(name_len))
+Some(self.name_buf().char_at(name_len))
};

// "If the character reference is being consumed as part of an
4 changes: 2 additions & 2 deletions src/tokenizer/interface.rs
@@ -81,8 +81,8 @@ impl Tag {

let mut self_attrs = self.attrs.clone();
let mut other_attrs = other.attrs.clone();
-self_attrs.as_mut_slice().sort();
-other_attrs.as_mut_slice().sort();
+self_attrs[..].sort();
+other_attrs[..].sort();

self_attrs == other_attrs
}
23 changes: 11 additions & 12 deletions src/tokenizer/mod.rs
@@ -13,9 +13,8 @@

use core::clone::Clone;
use core::cmp::Ord;
-use core::iter::{range, IteratorExt};
+use core::iter::{range, Iterator};
use core::option::Option::{self, Some, None};
-use core::str::Str;

pub use self::interface::{Doctype, Attribute, TagKind, StartTag, EndTag, Tag};
pub use self::interface::{Token, DoctypeToken, TagToken, CommentToken};
@@ -61,7 +60,7 @@ fn append_strings(lhs: &mut String, rhs: String) {
if lhs.is_empty() {
*lhs = rhs;
} else {
-lhs.push_str(rhs.as_slice());
+lhs.push_str(&rhs);
}
}

@@ -182,7 +181,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
}

let start_tag_name = opts.last_start_tag_name.take()
-.map(|s| Atom::from_slice(s.as_slice()));
+.map(|s| Atom::from_slice(&s));
let state = opts.initial_state.unwrap_or(states::Data);
let discard_bom = opts.discard_bom;
Tokenizer {
@@ -229,7 +228,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
return;
}

-let pos = if self.discard_bom && input.as_slice().char_at(0) == '\u{feff}' {
+let pos = if self.discard_bom && input.char_at(0) == '\u{feff}' {
self.discard_bom = false;
3 // length of BOM in UTF-8
} else {
@@ -385,7 +384,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.finish_attribute();

let name = replace(&mut self.current_tag_name, String::new());
-let name = Atom::from_slice(name.as_slice());
+let name = Atom::from_slice(&name);

match self.current_tag_kind {
StartTag => {
@@ -448,7 +447,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
match self.last_start_tag_name.as_ref() {
Some(last) =>
(self.current_tag_kind == EndTag)
-&& (self.current_tag_name.as_slice() == last.as_slice()),
+&& (self.current_tag_name == last.as_slice()),
None => false,
}
}
@@ -468,7 +467,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
// FIXME: the spec says we should error as soon as the name is finished.
// FIXME: linear time search, do we care?
let dup = {
-let name = self.current_attr_name.as_slice();
+let name = &self.current_attr_name[..];
self.current_tag_attrs.iter().any(|a| a.name.local.as_slice() == name)
};

@@ -481,7 +480,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
self.current_tag_attrs.push(Attribute {
// The tree builder will adjust the namespace if necessary.
// This only happens in foreign elements.
-name: QualName::new(ns!(""), Atom::from_slice(name.as_slice())),
+name: QualName::new(ns!(""), Atom::from_slice(&name)),
value: replace(&mut self.current_attr_value, empty_str()),
});
}
@@ -810,7 +809,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
let c = get_char!(self);
match c {
'\t' | '\n' | '\x0C' | ' ' | '/' | '>' => {
-let esc = if self.temp_buf.as_slice() == "script" { DoubleEscaped } else { Escaped };
+let esc = if self.temp_buf == "script" { DoubleEscaped } else { Escaped };
go!(self: emit c; to RawData ScriptDataEscaped esc);
}
_ => match lower_ascii_letter(c) {
@@ -860,7 +859,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
let c = get_char!(self);
match c {
'\t' | '\n' | '\x0C' | ' ' | '/' | '>' => {
-let esc = if self.temp_buf.as_slice() == "script" { Escaped } else { DoubleEscaped };
+let esc = if self.temp_buf == "script" { Escaped } else { DoubleEscaped };
go!(self: emit c; to RawData ScriptDataEscaped esc);
}
_ => match lower_ascii_letter(c) {
@@ -1252,7 +1251,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {

let mut results: Vec<(states::State, u64)>
= self.state_profile.iter().map(|(s, t)| (*s, *t)).collect();
-results.as_mut_slice().sort_by(|&(_, x), &(_, y)| y.cmp(&x));
+results[..].sort_by(|&(_, x), &(_, y)| y.cmp(&x));

let total = results.iter().map(|&(_, t)| t).sum();
println!("\nTokenizer profile, in nanoseconds");
2 changes: 1 addition & 1 deletion src/tree_builder/actions.rs
@@ -635,7 +635,7 @@ impl<Handle, Sink> TreeBuilderActions<Handle>
fn is_type_hidden(&self, tag: &Tag) -> bool {
match tag.attrs.iter().find(|&at| at.name == qualname!("", "type")) {
None => false,
-Some(at) => at.value.as_slice().eq_ignore_ascii_case("hidden"),
+Some(at) => (&*at.value).eq_ignore_ascii_case("hidden"),
}
}
