From 457a52e8cb4c67e1a06288816f08412a98a755cf Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Mon, 13 Apr 2015 10:24:08 +0200 Subject: [PATCH 1/2] Upgrade to rustc 1.0.0-nightly (6790b0e51 2015-04-11) (built 2015-04-12) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Also disable doctests because of https://github.com/rust-lang/cargo/issues/1512 Note that there weren’t any doctests to run anyway. --- Cargo.toml | 12 +++++--- macros/src/lib.rs | 10 +++++++ macros/src/match_token.rs | 58 ++++++++++++++++++++------------------- src/tokenizer/mod.rs | 4 +-- tests/tokenizer.rs | 2 +- tests/tree_builder.rs | 2 +- 6 files changed, 51 insertions(+), 37 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 4989b5a0..6fb85ef8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,13 +4,17 @@ name = "html5ever" version = "0.0.0" authors = [ "The html5ever Project Developers" ] +[lib] +name = "html5ever" + +# https://github.com/rust-lang/cargo/issues/1512 +doctest = false + [dependencies] time = "0" log = "0" - -# servo/string-cache#80 -phf = "0.6" -phf_macros = "0.6" +phf = "0.7" +phf_macros = "0.7" [dependencies.string_cache] git = "https://github.com/servo/string-cache" diff --git a/macros/src/lib.rs b/macros/src/lib.rs index 2d26e9cb..8a9093b8 100644 --- a/macros/src/lib.rs +++ b/macros/src/lib.rs @@ -22,6 +22,16 @@ extern crate mac; use rustc::plugin::Registry; +macro_rules! panictry { + ($e:expr) => ({ + use syntax::diagnostic::FatalError; + match $e { + Ok(e) => e, + Err(FatalError) => panic!(FatalError) + } + }) +} + // Make these public so that rustdoc will generate documentation for them. 
pub mod named_entities; pub mod match_token; diff --git a/macros/src/match_token.rs b/macros/src/match_token.rs index 41278863..733f4da8 100644 --- a/macros/src/match_token.rs +++ b/macros/src/match_token.rs @@ -100,6 +100,7 @@ matching, by enforcing the following restrictions on its input: use std::collections::{HashSet, HashMap}; use std::collections::hash_map::Entry::{Occupied, Vacant}; +use syntax::diagnostic::FatalError; use syntax::ptr::P; use syntax::codemap::{Span, Spanned, spanned}; use syntax::ast; @@ -171,46 +172,46 @@ fn push_all(lhs: &mut Vec, rhs: Vec) { lhs.extend(rhs.into_iter()); } -fn parse_spanned_ident(parser: &mut Parser) -> ast::SpannedIdent { +fn parse_spanned_ident(parser: &mut Parser) -> Result { let lo = parser.span.lo; - let ident = parser.parse_ident(); + let ident = try!(parser.parse_ident()); let hi = parser.last_span.hi; - spanned(lo, hi, ident) + Ok(spanned(lo, hi, ident)) } -fn parse_tag(parser: &mut Parser) -> Spanned { +fn parse_tag(parser: &mut Parser) -> Result, FatalError> { let lo = parser.span.lo; - parser.expect(&token::Lt); + try!(parser.expect(&token::Lt)); - let kind = match parser.eat(&token::BinOp(token::Slash)) { + let kind = match try!(parser.eat(&token::BinOp(token::Slash))) { true => EndTag, false => StartTag, }; - let name = match parser.eat(&token::Underscore) { + let name = match try!(parser.eat(&token::Underscore)) { true => None, - false => Some(parser.parse_ident()), + false => Some(try!(parser.parse_ident())), }; - parser.expect(&token::Gt); - spanned(lo, parser.last_span.hi, Tag { + try!(parser.expect(&token::Gt)); + Ok(spanned(lo, parser.last_span.hi, Tag { kind: kind, name: name, - }) + })) } /// Parse a `match_token!` invocation into the little AST defined above. 
-fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Match { +fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result { let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), toks.to_vec()); - let discriminant = parser.parse_expr_res(parser::RESTRICTION_NO_STRUCT_LITERAL); - parser.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace)); + let discriminant = try!(parser.parse_expr_res(parser::RESTRICTION_NO_STRUCT_LITERAL)); + try!(parser.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace))); let mut arms: Vec = Vec::new(); while parser.token != token::CloseDelim(token::Brace) { let mut binding = None; if parser.look_ahead(1, |t| *t == token::At) { - binding = Some(parse_spanned_ident(&mut parser)); - parser.bump(); // Consume the @ + binding = Some(try!(parse_spanned_ident(&mut parser))); + try!(parser.bump()); // Consume the @ } let lhs_lo = parser.span.lo; @@ -219,23 +220,23 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Match { token::Lt => { let mut tags = Vec::new(); while parser.token != token::FatArrow { - tags.push(parse_tag(&mut parser)); + tags.push(try!(parse_tag(&mut parser))); } Tags(tags) } - _ => parser.fatal("unrecognized pattern"), + _ => return Err(parser.fatal("unrecognized pattern")), }; let lhs_hi = parser.last_span.hi; - parser.expect(&token::FatArrow); + try!(parser.expect(&token::FatArrow)); let rhs_lo = parser.span.lo; let mut rhs_hi = parser.span.hi; - let rhs = if parser.eat_keyword(token::keywords::Else) { - parser.expect(&token::Comma); + let rhs = if try!(parser.eat_keyword(token::keywords::Else)) { + try!(parser.expect(&token::Comma)); Else } else { - let expr = parser.parse_expr_res(parser::RESTRICTION_STMT_EXPR); + let expr = try!(parser.parse_expr_res(parser::RESTRICTION_STMT_EXPR)); rhs_hi = parser.last_span.hi; let require_comma = @@ -243,9 +244,10 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Match { && parser.token != 
token::CloseDelim(token::Brace); if require_comma { - parser.commit_expr(&*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]); + try!(parser.commit_expr( + &*expr, &[token::Comma], &[token::CloseDelim(token::Brace)])); } else { - parser.eat(&token::Comma); + try!(parser.eat(&token::Comma)); } Expr(expr) @@ -259,12 +261,12 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Match { } // Consume the closing brace - parser.bump(); + try!(parser.bump()); - Match { + Ok(Match { discriminant: discriminant, arms: arms, - } + }) } /// Description of a wildcard match arm. @@ -292,7 +294,7 @@ fn make_tag_pattern(cx: &mut ExtCtxt, binding: Tokens, tag: Tag) -> Tokens { /// Expand the `match_token!` macro. pub fn expand(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree]) -> Box { - let Match { discriminant, mut arms } = parse(cx, toks); + let Match { discriminant, mut arms } = panictry!(parse(cx, toks)); // Handle the last arm specially at the end. let last_arm = match arms.pop() { diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs index 6ef440b5..270f1f1e 100644 --- a/src/tokenizer/mod.rs +++ b/src/tokenizer/mod.rs @@ -1247,13 +1247,11 @@ impl Tokenizer { #[cfg(not(for_c))] fn dump_profile(&self) { - use core::iter::AdditiveIterator; - let mut results: Vec<(states::State, u64)> = self.state_profile.iter().map(|(s, t)| (*s, *t)).collect(); results.sort_by(|&(_, x), &(_, y)| y.cmp(&x)); - let total = results.iter().map(|&(_, t)| t).sum(); + let total: u64 = results.iter().map(|&(_, t)| t).sum(); println!("\nTokenizer profile, in nanoseconds"); println!("\n{:12} total in token sink", self.time_in_sink); println!("\n{:12} total in tokenizer", total); diff --git a/tests/tokenizer.rs b/tests/tokenizer.rs index b2d83d2f..8e188f30 100644 --- a/tests/tokenizer.rs +++ b/tests/tokenizer.rs @@ -390,7 +390,7 @@ fn tests(src_dir: &Path) -> Vec { let mut tests = vec!(); foreach_html5lib_test(src_dir, "tokenizer", - OsStr::from_str("test"), |path, mut file| { + 
OsStr::new("test"), |path, mut file| { let js = Json::from_reader(&mut file).ok().expect("json parse error"); match js.get_obj().get(&"tests".to_string()) { diff --git a/tests/tree_builder.rs b/tests/tree_builder.rs index 2f0811b6..c2149987 100644 --- a/tests/tree_builder.rs +++ b/tests/tree_builder.rs @@ -210,7 +210,7 @@ fn tests(src_dir: &Path, ignores: &HashSet) -> Vec { let mut tests = vec!(); foreach_html5lib_test(src_dir, "tree-construction", - OsStr::from_str("dat"), |path, file| { + OsStr::new("dat"), |path, file| { let buf = io::BufReader::new(file); let lines = buf.lines() .map(|res| res.ok().expect("couldn't read")); From 689784556559a430c1688bd175da273f1ea5e5fc Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Mon, 13 Apr 2015 10:39:53 +0200 Subject: [PATCH 2/2] Use crates.io libc crate in libhtml5ever_for_c.a --- Makefile.in | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile.in b/Makefile.in index 738a118a..01729d76 100644 --- a/Makefile.in +++ b/Makefile.in @@ -15,6 +15,7 @@ RUST_DIRS := -L $(VPATH)/target/debug -L $(VPATH)/target/debug/deps RUSTC_CMD := $(RUSTC) -D warnings -C rpath $(RUST_DIRS) \ --extern time=`find $(VPATH)/target/debug/deps -name 'libtime-*.rlib'` \ --extern log=`find $(VPATH)/target/debug/deps -name 'liblog-*.rlib'` \ + --extern libc=`find $(VPATH)/target/debug/deps -name 'liblibc-*.rlib'` \ $(RUSTFLAGS) # We build the library itself using Cargo.