diff --git a/examples/html2html.rs b/examples/html2html.rs
index 5045f6a9..b55d1388 100644
--- a/examples/html2html.rs
+++ b/examples/html2html.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(io)]
+#![feature(old_io)]
//! Parse and re-serialize a HTML5 document.
//!
diff --git a/examples/noop-tokenize.rs b/examples/noop-tokenize.rs
index 1b7b4a4f..cb3db552 100644
--- a/examples/noop-tokenize.rs
+++ b/examples/noop-tokenize.rs
@@ -9,7 +9,7 @@
// Run a single benchmark once. For use with profiling tools.
-#![feature(core, env, io, test, path)]
+#![feature(core, env, old_io, test, old_path)]
extern crate test;
extern crate html5ever;
diff --git a/examples/noop-tree-builder.rs b/examples/noop-tree-builder.rs
index 8bc66b96..e12b7868 100644
--- a/examples/noop-tree-builder.rs
+++ b/examples/noop-tree-builder.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(io)]
+#![feature(old_io)]
extern crate string_cache;
@@ -17,7 +17,7 @@ use std::old_io as io;
use std::default::Default;
use std::string::String;
use std::collections::HashMap;
-use std::string::CowString;
+use std::borrow::Cow;
use string_cache::QualName;
use html5ever::{parse_to, one_input};
@@ -70,7 +70,7 @@ impl TreeSink for Sink {
Ok(())
}
- fn parse_error(&mut self, _msg: CowString<'static>) { }
+ fn parse_error(&mut self, _msg: Cow<'static, str>) { }
fn set_quirks_mode(&mut self, _mode: QuirksMode) { }
fn append(&mut self, _parent: usize, _child: NodeOrText<usize>) { }
diff --git a/examples/print-rcdom.rs b/examples/print-rcdom.rs
index 2a8efc12..19860140 100644
--- a/examples/print-rcdom.rs
+++ b/examples/print-rcdom.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(plugin, io)]
+#![feature(plugin, old_io)]
#![plugin(string_cache_plugin)]
extern crate html5ever;
diff --git a/examples/print-tree-actions.rs b/examples/print-tree-actions.rs
index c21bfa9d..cbdf8243 100644
--- a/examples/print-tree-actions.rs
+++ b/examples/print-tree-actions.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(io)]
+#![feature(old_io)]
extern crate string_cache;
@@ -17,7 +17,7 @@ use std::old_io as io;
use std::default::Default;
use std::string::String;
use std::collections::HashMap;
-use std::string::CowString;
+use std::borrow::Cow;
use string_cache::QualName;
use html5ever::{parse_to, one_input};
@@ -40,7 +40,7 @@ impl Sink {
impl TreeSink for Sink {
type Handle = usize;
- fn parse_error(&mut self, msg: CowString<'static>) {
+ fn parse_error(&mut self, msg: Cow<'static, str>) {
println!("Parse error: {}", msg);
}
diff --git a/examples/tokenize.rs b/examples/tokenize.rs
index b7cad825..0b776571 100644
--- a/examples/tokenize.rs
+++ b/examples/tokenize.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(core, io)]
+#![feature(core, old_io)]
extern crate html5ever;
diff --git a/macros/src/lib.rs b/macros/src/lib.rs
index 6880824a..e9569bb3 100644
--- a/macros/src/lib.rs
+++ b/macros/src/lib.rs
@@ -11,7 +11,7 @@
#![crate_type="dylib"]
#![feature(plugin_registrar, quote)]
-#![feature(rustc_private, core, hash, collections, path, io)]
+#![feature(rustc_private, core, collections, old_path, old_io, std_misc)]
#![deny(warnings)]
extern crate syntax;
diff --git a/src/for_c/common.rs b/src/for_c/common.rs
index e59e360f..adad27cc 100644
--- a/src/for_c/common.rs
+++ b/src/for_c/common.rs
@@ -12,8 +12,8 @@ use core::prelude::*;
use core::ptr;
use core::slice;
use core::str;
-use core::marker::ContravariantLifetime;
-use collections::string::CowString;
+use core::marker::PhantomData;
+use std::borrow::Cow;
use collections::string::String;
use libc::{size_t, c_int, c_char, strlen};
@@ -43,7 +43,7 @@ impl h5e_buf {
pub struct LifetimeBuf<'a> {
buf: h5e_buf,
- marker: ContravariantLifetime<'a>,
+ marker: PhantomData<&'a [u8]>,
}
impl<'a> LifetimeBuf<'a> {
@@ -53,14 +53,14 @@ impl<'a> LifetimeBuf<'a> {
data: x.as_bytes().as_ptr(),
len: x.len() as size_t,
},
- marker: ContravariantLifetime,
+ marker: PhantomData,
}
}
pub fn null() -> LifetimeBuf<'a> {
LifetimeBuf {
buf: h5e_buf::null(),
- marker: ContravariantLifetime,
+ marker: PhantomData,
}
}
@@ -88,7 +88,7 @@ impl AsLifetimeBuf for Atom {
}
}
-impl<'b> AsLifetimeBuf for CowString<'b> {
+impl<'b> AsLifetimeBuf for Cow<'b, str> {
fn as_lifetime_buf<'a>(&'a self) -> LifetimeBuf<'a> {
LifetimeBuf::from_str(self.as_slice())
}
diff --git a/src/lib.rs b/src/lib.rs
index bafaf394..e12ee010 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,8 +11,8 @@
#![crate_type="dylib"]
#![feature(plugin, box_syntax, no_std)]
-#![feature(core, hash, collections, alloc)]
-#![cfg_attr(not(for_c), feature(io))]
+#![feature(core, collections, alloc)]
+#![cfg_attr(not(for_c), feature(old_io))]
#![deny(warnings)]
#![allow(unused_parens)]
diff --git a/src/sink/owned_dom.rs b/src/sink/owned_dom.rs
index a0fbb9b8..b375885c 100644
--- a/src/sink/owned_dom.rs
+++ b/src/sink/owned_dom.rs
@@ -37,7 +37,7 @@ use core::ptr;
use alloc::boxed::Box;
use collections::vec::Vec;
use collections::string::String;
-use std::string::CowString;
+use std::borrow::Cow;
use std::old_io::{Writer, IoResult};
use std::collections::HashSet;
use std::ops::{Deref, DerefMut};
@@ -150,7 +150,7 @@ fn append_to_existing_text(mut prev: Handle, text: &str) -> bool {
pub struct Sink {
nodes: Vec>>,
document: Handle,
- errors: Vec<CowString<'static>>,
+ errors: Vec<Cow<'static, str>>,
quirks_mode: QuirksMode,
}
@@ -186,7 +186,7 @@ impl Sink {
impl TreeSink for Sink {
type Handle = Handle;
- fn parse_error(&mut self, msg: CowString<'static>) {
+ fn parse_error(&mut self, msg: Cow<'static, str>) {
self.errors.push(msg);
}
@@ -302,7 +302,7 @@ pub struct Node {
pub struct OwnedDom {
pub document: Box<Node>,
- pub errors: Vec<CowString<'static>>,
+ pub errors: Vec<Cow<'static, str>>,
pub quirks_mode: QuirksMode,
}
diff --git a/src/sink/rcdom.rs b/src/sink/rcdom.rs
index e797b79a..e4bf34ca 100644
--- a/src/sink/rcdom.rs
+++ b/src/sink/rcdom.rs
@@ -27,7 +27,7 @@ use core::default::Default;
use alloc::rc::{Rc, Weak};
use collections::vec::Vec;
use collections::string::String;
-use std::string::CowString;
+use std::borrow::Cow;
use std::old_io::{Writer, IoResult};
use std::ops::DerefMut;
@@ -117,7 +117,7 @@ pub struct RcDom {
pub document: Handle,
/// Errors that occurred during parsing.
- pub errors: Vec<CowString<'static>>,
+ pub errors: Vec<Cow<'static, str>>,
/// The document's quirks mode.
pub quirks_mode: QuirksMode,
@@ -126,7 +126,7 @@ pub struct RcDom {
impl TreeSink for RcDom {
type Handle = Handle;
- fn parse_error(&mut self, msg: CowString<'static>) {
+ fn parse_error(&mut self, msg: Cow<'static, str>) {
self.errors.push(msg);
}
diff --git a/src/tokenizer/buffer_queue.rs b/src/tokenizer/buffer_queue.rs
index 7a873e5a..d498bfaf 100644
--- a/src/tokenizer/buffer_queue.rs
+++ b/src/tokenizer/buffer_queue.rs
@@ -14,7 +14,7 @@ use util::smallcharset::SmallCharSet;
use core::str::CharRange;
use collections::string::String;
-use collections::RingBuf;
+use collections::VecDeque;
pub use self::SetResult::{FromSet, NotFromSet};
@@ -36,14 +36,14 @@ pub enum SetResult {
/// consuming characters.
pub struct BufferQueue {
/// Buffers to process.
- buffers: RingBuf<Buffer>,
+ buffers: VecDeque<Buffer>,
}
impl BufferQueue {
/// Create an empty BufferQueue.
pub fn new() -> BufferQueue {
BufferQueue {
- buffers: RingBuf::with_capacity(3),
+ buffers: VecDeque::with_capacity(3),
}
}
@@ -134,7 +134,7 @@ impl BufferQueue {
// If they do not match, return Some(false).
// If not enough characters are available to know, return None.
pub fn eat(&mut self, pat: &str) -> Option<bool> {
- let mut buffers_exhausted = 0us;
+ let mut buffers_exhausted = 0usize;
let mut consumed_from_last = match self.buffers.front() {
None => return None,
Some(ref buf) => buf.pos,
diff --git a/src/tokenizer/interface.rs b/src/tokenizer/interface.rs
index 9762fe7d..d06094a2 100644
--- a/src/tokenizer/interface.rs
+++ b/src/tokenizer/interface.rs
@@ -15,7 +15,8 @@ use tokenizer::states;
use collections::vec::Vec;
use collections::slice::SliceExt;
use collections::string::String;
-use std::string::CowString;
+use std::borrow::Cow;
+use std::marker::Send;
use string_cache::{Atom, QualName};
@@ -96,9 +97,12 @@ pub enum Token {
CharacterTokens(String),
NullCharacterToken,
EOFToken,
- ParseError(CowString<'static>),
+ ParseError(Cow<'static, str>),
}
+// FIXME: rust-lang/rust#22629
+unsafe impl Send for Token { }
+
/// Types which can receive tokens from the tokenizer.
pub trait TokenSink {
/// Process a token.
diff --git a/src/tokenizer/mod.rs b/src/tokenizer/mod.rs
index 0c5d8286..c923a427 100644
--- a/src/tokenizer/mod.rs
+++ b/src/tokenizer/mod.rs
@@ -42,8 +42,7 @@ use collections::vec::Vec;
use collections::slice::SliceExt;
use collections::string::{String, ToString};
use collections::str::StrExt;
-use std::string::CowString;
-use std::borrow::Cow::Borrowed;
+use std::borrow::Cow::{self, Borrowed};
use std::collections::BTreeMap;
use string_cache::{Atom, QualName};
@@ -537,7 +536,7 @@ impl Tokenizer {
self.input_buffers.push_front(buf);
}
- fn emit_error(&mut self, error: CowString<'static>) {
+ fn emit_error(&mut self, error: Cow<'static, str>) {
self.process_token(ParseError(error));
}
}
diff --git a/src/tree_builder/actions.rs b/src/tree_builder/actions.rs
index 18e94cbd..8dbd7641 100644
--- a/src/tree_builder/actions.rs
+++ b/src/tree_builder/actions.rs
@@ -668,7 +668,7 @@ impl TreeBuilderActions
// https://html.spec.whatwg.org/multipage/syntax.html#reset-the-insertion-mode-appropriately
fn reset_insertion_mode(&mut self) -> InsertionMode {
for (i, mut node) in self.open_elems.iter().enumerate().rev() {
- let last = i == 0us;
+ let last = i == 0usize;
if let (true, Some(ctx)) = (last, self.context_elem.as_ref()) {
node = ctx;
}
@@ -771,7 +771,7 @@ impl TreeBuilderActions
fn create_formatting_element_for(&mut self, tag: Tag) -> Handle {
// FIXME: This really wants unit tests.
let mut first_match = None;
- let mut matches = 0us;
+ let mut matches = 0usize;
for (i, _, old_tag) in self.active_formatting_end_to_marker() {
if tag.equiv_modulo_attr_order(old_tag) {
first_match = Some(i);
diff --git a/src/tree_builder/interface.rs b/src/tree_builder/interface.rs
index a3b9e7bb..0a872b7d 100644
--- a/src/tree_builder/interface.rs
+++ b/src/tree_builder/interface.rs
@@ -16,7 +16,7 @@ use tokenizer::Attribute;
use collections::vec::Vec;
use collections::string::String;
-use std::string::CowString;
+use std::borrow::Cow;
use string_cache::QualName;
@@ -48,7 +48,7 @@ pub trait TreeSink {
type Handle: Clone;
/// Signal a parse error.
- fn parse_error(&mut self, msg: CowString<'static>);
+ fn parse_error(&mut self, msg: Cow<'static, str>);
/// Get a handle to the `Document` node.
fn get_document(&mut self) -> Self::Handle;
diff --git a/src/tree_builder/mod.rs b/src/tree_builder/mod.rs
index f4763659..c89c8fdf 100644
--- a/src/tree_builder/mod.rs
+++ b/src/tree_builder/mod.rs
@@ -33,7 +33,7 @@ use core::mem::replace;
use collections::vec::Vec;
use collections::string::String;
use std::borrow::Cow::Borrowed;
-use collections::RingBuf;
+use collections::VecDeque;
#[macro_use] mod tag_sets;
// "pub" is a workaround for rust#18241 (?)
@@ -324,7 +324,7 @@ impl TreeBuilder
fn process_to_completion(&mut self, mut token: Token) {
// Queue of additional tokens yet to be processed.
// This stays empty in the common case where we don't split whitespace.
- let mut more_tokens = RingBuf::new();
+ let mut more_tokens = VecDeque::new();
loop {
let is_self_closing = match token {
diff --git a/src/tree_builder/rules.rs b/src/tree_builder/rules.rs
index ec95a6e7..259f18af 100644
--- a/src/tree_builder/rules.rs
+++ b/src/tree_builder/rules.rs
@@ -31,12 +31,12 @@ fn any_not_whitespace(x: &String) -> bool {
}
// This goes in a trait so that we can control visibility.
-pub trait TreeBuilderStep {
+pub trait TreeBuilderStep {
fn step(&mut self, mode: InsertionMode, token: Token) -> ProcessResult;
}
#[doc(hidden)]
-impl TreeBuilderStep
+impl TreeBuilderStep
for super::TreeBuilder
where Handle: Clone,
Sink: TreeSink,
diff --git a/src/util/smallcharset.rs b/src/util/smallcharset.rs
index 04b8084f..0a138575 100644
--- a/src/util/smallcharset.rs
+++ b/src/util/smallcharset.rs
@@ -52,8 +52,8 @@ mod test {
#[test]
fn nonmember_prefix() {
for &c in ['&', '\0'].iter() {
- for x in range(0, 48us) {
- for y in range(0, 48us) {
+ for x in range(0, 48usize) {
+ for y in range(0, 48usize) {
let mut s = repeat("x").take(x).collect::<String>();
s.push(c);
s.push_str(repeat("x").take(y).collect::<String>().as_slice());
diff --git a/test_util/src/lib.rs b/test_util/src/lib.rs
index 3fc62879..ae4dcf5b 100644
--- a/test_util/src/lib.rs
+++ b/test_util/src/lib.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(io, path)]
+#![feature(old_io, old_path)]
use std::old_io as io;
use std::old_path::{GenericPath,Path};
diff --git a/tests/tokenizer.rs b/tests/tokenizer.rs
index e2b594d7..031c8b29 100644
--- a/tests/tokenizer.rs
+++ b/tests/tokenizer.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(core, env, io, path, plugin, rustc_private, start, std_misc, test)]
+#![feature(core, env, old_io, old_path, plugin, rustc_private, start, std_misc, test)]
#![plugin(string_cache_plugin)]
@@ -311,7 +311,7 @@ fn mk_test(desc: String, input: String, expect: Vec<Token>, opts: TokenizerOpts)
},
testfn: DynTestFn(Thunk::new(move || {
// Split up the input at different points to test incremental tokenization.
- let insplits = splits(input.as_slice(), 3);
+ let insplits = splits(&input, 3);
for input in insplits.into_iter() {
// Clone 'input' so we have it for the failure message.
// Also clone opts. If we don't, we get the wrong
diff --git a/tests/tree_builder.rs b/tests/tree_builder.rs
index 268f6bd7..dd14a35a 100644
--- a/tests/tree_builder.rs
+++ b/tests/tree_builder.rs
@@ -7,7 +7,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(core, env, io, path, plugin, start, std_misc, test)]
+#![feature(core, env, old_io, old_path, plugin, start, std_misc, test)]
#![plugin(string_cache_plugin)]