From f460b1d479440f53292594a5122bfc52e708d512 Mon Sep 17 00:00:00 2001
From: Daniel Fath
Date: Fri, 3 Apr 2015 17:37:47 +0200
Subject: [PATCH] Rust upgrade to rustc 1.0.0-nightly (d17d6e7f1 2015-04-02)

Rustup

rustc --version:
rustc 1.0.0-nightly (d17d6e7f1 2015-04-02) (built 2015-04-03)
---
 examples/tokenize.rs   |  2 +-
 macros/src/lib.rs      |  2 +-
 src/for_c/common.rs    |  5 +++++
 src/for_c/tokenizer.rs | 10 ++++++++++
 src/serialize/mod.rs   |  4 ++--
 src/tokenizer/mod.rs   |  2 +-
 tests/tokenizer.rs     |  4 +---
 tests/tree_builder.rs  |  7 +++----
 8 files changed, 24 insertions(+), 12 deletions(-)

diff --git a/examples/tokenize.rs b/examples/tokenize.rs
index 5d379968..5635b038 100644
--- a/examples/tokenize.rs
+++ b/examples/tokenize.rs
@@ -18,7 +18,7 @@ use html5ever::tokenizer::{TokenSink, Token, TokenizerOpts, ParseError};
 use html5ever::tokenizer::{CharacterTokens, NullCharacterToken, TagToken, StartTag, EndTag};
 use html5ever::driver::{tokenize_to, one_input};

-#[derive(Copy)]
+#[derive(Copy, Clone)]
 struct TokenPrinter {
     in_char_run: bool,
 }
diff --git a/macros/src/lib.rs b/macros/src/lib.rs
index f22fb110..2d26e9cb 100644
--- a/macros/src/lib.rs
+++ b/macros/src/lib.rs
@@ -10,7 +10,7 @@
 #![crate_name="html5ever_macros"]
 #![crate_type="dylib"]

-#![feature(plugin_registrar, quote, rustc_private, convert, slice_patterns)]
+#![feature(plugin_registrar, quote, rustc_private, slice_patterns)]
 #![deny(warnings)]

 extern crate syntax;
diff --git a/src/for_c/common.rs b/src/for_c/common.rs
index e5bd248d..d469a484 100644
--- a/src/for_c/common.rs
+++ b/src/for_c/common.rs
@@ -27,6 +27,11 @@ pub struct h5e_buf {
 }

 impl Copy for h5e_buf { }
+impl Clone for h5e_buf {
+    fn clone(&self) -> h5e_buf {
+        *self
+    }
+}

 impl h5e_buf {
     pub fn null() -> h5e_buf {
diff --git a/src/for_c/tokenizer.rs b/src/for_c/tokenizer.rs
index 8bea595e..bdb02fb0 100644
--- a/src/for_c/tokenizer.rs
+++ b/src/for_c/tokenizer.rs
@@ -41,6 +41,11 @@ pub struct h5e_token_ops {
 }

 impl Copy for h5e_token_ops { }
+impl Clone for h5e_token_ops {
+    fn clone(&self) -> h5e_token_ops {
+        *self
+    }
+}

 #[repr(C)]
 pub struct h5e_token_sink {
@@ -49,6 +54,11 @@ pub struct h5e_token_sink {
 }

 impl Copy for h5e_token_sink { }
+impl Clone for h5e_token_sink {
+    fn clone(&self) -> h5e_token_sink {
+        *self
+    }
+}

 impl TokenSink for *mut h5e_token_sink {
     fn process_token(&mut self, token: Token) {
diff --git a/src/serialize/mod.rs b/src/serialize/mod.rs
index 75f1539c..8cc23419 100644
--- a/src/serialize/mod.rs
+++ b/src/serialize/mod.rs
@@ -16,7 +16,7 @@ use collections::vec::Vec;
 use string_cache::{Atom, QualName};

 //§ serializing-html-fragments
-#[derive(Copy, PartialEq)]
+#[derive(Copy, Clone, PartialEq)]
 pub enum TraversalScope {
     IncludeNode,
     ChildrenOnly
@@ -34,7 +34,7 @@ pub fn serialize
     node.serialize(&mut ser, opts.traversal_scope)
 }

-#[derive(Copy)]
+#[derive(Copy, Clone)]
 pub struct SerializeOpts {
     /// Is scripting enabled?
     pub scripting_enabled: bool,
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index d0fe9460..6ef440b5 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -13,7 +13,7 @@

 use core::clone::Clone;
 use core::cmp::Ord;
-use core::iter::{range, Iterator};
+use core::iter::Iterator;
 use core::option::Option::{self, Some, None};

 pub use self::interface::{Doctype, Attribute, TagKind, StartTag, EndTag, Tag};
diff --git a/tests/tokenizer.rs b/tests/tokenizer.rs
index 4973395f..b2d83d2f 100644
--- a/tests/tokenizer.rs
+++ b/tests/tokenizer.rs
@@ -8,7 +8,6 @@
 // except according to those terms.

 #![feature(plugin, start, std_misc, test, slice_patterns)]
-
 #![plugin(string_cache_plugin)]

 extern crate test;
@@ -25,7 +24,6 @@ use std::ffi::OsStr;
 use std::mem::replace;
 use std::default::Default;
 use std::path::Path;
-use std::thunk::Thunk;
 use test::{TestDesc, TestDescAndFn, DynTestName, DynTestFn};
 use test::ShouldPanic::No;
 use rustc_serialize::json::Json;
@@ -309,7 +307,7 @@ fn mk_test(desc: String, input: String, expect: Vec<Token>, opts: TokenizerOpts)
             ignore: false,
             should_panic: No,
         },
-        testfn: DynTestFn(Thunk::new(move || {
+        testfn: DynTestFn(Box::new(move || {
             // Split up the input at different points to test incremental tokenization.
             let insplits = splits(&input, 3);
             for input in insplits.into_iter() {
diff --git a/tests/tree_builder.rs b/tests/tree_builder.rs
index be852c0d..92482ea3 100644
--- a/tests/tree_builder.rs
+++ b/tests/tree_builder.rs
@@ -27,7 +27,6 @@
 use std::mem::replace;
 use std::default::Default;
 use std::path::Path;
 use std::collections::{HashSet, HashMap};
-use std::thunk::Thunk;
 use test::{TestDesc, TestDescAndFn, DynTestName, DynTestFn};
 use test::ShouldPanic::No;
@@ -165,7 +164,7 @@ fn make_test(
             ignore: ignore,
             should_panic: No,
         },
-        testfn: DynTestFn(Thunk::new(move || {
+        testfn: DynTestFn(Box::new(move || {
             let mut result = String::new();
             match context {
                 None => {
@@ -174,9 +173,9 @@ fn make_test(
                         serialize(&mut result, 1, child.clone());
                     }
                 },
-                Some(context) => {
+                Some(ref context) => {
                     let dom: RcDom = parse_fragment(one_input(data.clone()),
-                        context,
+                        context.clone(),
                         Default::default());
                     // fragment case: serialize children of the html element
                     // rather than children of the document
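
A note on the recurring Copy/Clone edits above: in this nightly series, `Copy` became a
subtrait of `Clone`, so a type can no longer be `Copy` without also being `Clone`. The
sketch below is not part of the patch; `Buf` is a hypothetical stand-in for the crate's
`h5e_buf`. It shows both forms the diff uses: adding `Clone` to a derive list, and
writing the impl by hand for a `#[repr(C)]` FFI struct.

    // `Copy` now requires `Clone`, so derive lists gain `Clone`.
    #[derive(Copy, Clone)]
    struct TokenPrinter {
        in_char_run: bool,
    }

    // For the #[repr(C)] structs the patch writes the impls out by hand; a
    // `Copy` type can implement `Clone` by returning a plain copy of itself.
    #[repr(C)]
    struct Buf {                // hypothetical stand-in for h5e_buf
        data: *const u8,
        length: usize,
    }

    impl Copy for Buf { }

    impl Clone for Buf {
        fn clone(&self) -> Buf {
            *self
        }
    }

    fn main() {
        let printer = TokenPrinter { in_char_run: false };
        let copied = printer;   // bitwise copy; `printer` remains usable
        println!("{} {}", printer.in_char_run, copied.in_char_run);

        let buf = Buf { data: std::ptr::null(), length: 0 };
        let cloned = buf.clone();
        println!("{} bytes at {:?}", cloned.length, cloned.data);
    }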
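
The `Thunk::new(...)` to `Box::new(...)` edits in the test harnesses reflect the
retirement of `std::thunk::Thunk` (its import is dropped above): a deferred test body is
now just a boxed closure. The `Some(ref context)` / `context.clone()` change is related
fallout; the rewritten closure borrows its captured `context` and clones it rather than
moving it out. A minimal, self-contained sketch of the pattern on current Rust follows;
`run_later` is an illustrative helper, not part of libtest or this patch.

    // A boxed closure replaces the old Thunk: build it with Box::new(move || ...)
    // and run it exactly once later.
    fn run_later(job: Box<dyn FnOnce()>) {
        job();
    }

    fn main() {
        let data = String::from("<p>hello</p>");
        let context: Option<String> = Some(String::from("body"));

        let job: Box<dyn FnOnce()> = Box::new(move || {
            // `move` transfers `data` and `context` into the closure, as in the patch.
            match context {
                // Borrow the captured value instead of moving out of it,
                // mirroring the `Some(ref context)` arm in tree_builder.rs.
                Some(ref ctx) => println!("fragment case in <{}>: {} bytes", ctx, data.len()),
                None => println!("document case: {} bytes", data.len()),
            }
        });

        run_later(job);
    }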