diff --git a/src/etc/generate-keyword-tests.py b/src/etc/generate-keyword-tests.py
index 937c231a473e9..e53d6c718c155 100755
--- a/src/etc/generate-keyword-tests.py
+++ b/src/etc/generate-keyword-tests.py
@@ -34,15 +34,17 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// compile-flags: -Z parse-only
+
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py %s'
 
 fn main() {
-    let %s = "foo"; //~ error: ident
+    let %s = "foo"; //~ error: expected pattern, found keyword `%s`
 }
 """
 
 test_dir = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), '../test/compile-fail')
+    os.path.join(os.path.dirname(__file__), '../test/parse-fail')
 )
 
 for kw in sys.argv[1:]:
@@ -53,7 +55,7 @@
         os.chmod(test_file, stat.S_IWUSR)
 
     with open(test_file, 'wt') as f:
-        f.write(template % (datetime.datetime.now().year, kw, kw))
+        f.write(template % (datetime.datetime.now().year, kw, kw, kw))
 
     # mark file read-only
     os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
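For reference, with the updated template a generated test for a sample keyword (say `break`; the keyword is only an illustration) comes out roughly as below, preceded by the usual license header whose year fills the first `%s`:

    // compile-flags: -Z parse-only

    // This file was auto-generated using 'src/etc/generate-keyword-tests.py break'

    fn main() {
        let break = "foo"; //~ error: expected pattern, found keyword `break`
    }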
diff --git a/src/librustc/hir/fold.rs b/src/librustc/hir/fold.rs
index a6ff716488526..fa391538b9c60 100644
--- a/src/librustc/hir/fold.rs
+++ b/src/librustc/hir/fold.rs
@@ -18,7 +18,7 @@ use syntax::attr::ThinAttributesExt;
 use hir;
 use syntax::codemap::{respan, Span, Spanned};
 use syntax::ptr::P;
-use syntax::parse::token;
+use syntax::parse::token::keywords;
 use syntax::util::move_map::MoveMap;
 
 pub trait Folder : Sized {
@@ -867,7 +867,7 @@ pub fn noop_fold_crate<T: Folder>(Crate { module, attrs, config, span,
     let config = folder.fold_meta_items(config);
 
     let crate_mod = folder.fold_item(hir::Item {
-        name: token::special_idents::invalid.name,
+        name: keywords::Invalid.name(),
         attrs: attrs,
         id: DUMMY_NODE_ID,
         vis: hir::Public,
@@ -1060,10 +1060,11 @@ pub fn noop_fold_expr<T: Folder>(Expr { id, node, span, attrs }: Expr, folder: &
                           arms.move_map(|x| folder.fold_arm(x)),
                           source)
             }
-            ExprClosure(capture_clause, decl, body) => {
+            ExprClosure(capture_clause, decl, body, fn_decl_span) => {
                 ExprClosure(capture_clause,
                             folder.fold_fn_decl(decl),
-                            folder.fold_block(body))
+                            folder.fold_block(body),
+                            folder.new_span(fn_decl_span))
             }
             ExprBlock(blk) => ExprBlock(folder.fold_block(blk)),
             ExprAssign(el, er) => {
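The `special_idents` -> `keywords` switch that starts in this file repeats mechanically through most of the remaining hunks. A minimal sketch of the new call shape, assuming only what the patch itself uses (the real definitions live in `syntax::parse::token`):

    use syntax::parse::token::keywords;

    // Field accesses on special_idents become method calls on keyword entries:
    //   special_idents::invalid.name    ->  keywords::Invalid.name()
    //   special_idents::self_           ->  keywords::SelfValue.ident()
    //   special_idents::type_self.name  ->  keywords::SelfType.name()
    let name = keywords::Invalid.name();     // an interned Name
    let ident = keywords::SelfValue.ident(); // the corresponding Ident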
diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs
index 6e6f845abd368..ec9b465521b97 100644
--- a/src/librustc/hir/intravisit.rs
+++ b/src/librustc/hir/intravisit.rs
@@ -785,7 +785,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
             visitor.visit_expr(subexpression);
             walk_list!(visitor, visit_arm, arms);
         }
-        ExprClosure(_, ref function_declaration, ref body) => {
+        ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => {
             visitor.visit_fn(FnKind::Closure(expression.attrs.as_attr_slice()),
                              function_declaration,
                              body,
diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs
index 0a01cc91f0e30..6f30553e26638 100644
--- a/src/librustc/hir/lowering.rs
+++ b/src/librustc/hir/lowering.rs
@@ -1260,11 +1260,12 @@ pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P<hir::Expr> {
                                arms.iter().map(|x| lower_arm(lctx, x)).collect(),
                                hir::MatchSource::Normal)
             }
-            ExprKind::Closure(capture_clause, ref decl, ref body) => {
+            ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
                 lctx.with_parent_def(e.id, || {
                     hir::ExprClosure(lower_capture_clause(lctx, capture_clause),
                                      lower_fn_decl(lctx, decl),
-                                     lower_block(lctx, body))
+                                     lower_block(lctx, body),
+                                     fn_decl_span)
                 })
             }
             ExprKind::Block(ref blk) => hir::ExprBlock(lower_block(lctx, blk)),
diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs
index 8c626226bd3c3..4af37fe16adb1 100644
--- a/src/librustc/hir/map/blocks.rs
+++ b/src/librustc/hir/map/blocks.rs
@@ -250,7 +250,7 @@ impl<'a> FnLikeNode<'a> {
                 }
             }
             map::NodeExpr(e) => match e.node {
-                ast::ExprClosure(_, ref decl, ref block) =>
+                ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) =>
                     closure(ClosureParts::new(&decl,
                                               &block,
                                               e.id,
diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs
index 4e7520035238f..7eaace91ae9b6 100644
--- a/src/librustc/hir/mod.rs
+++ b/src/librustc/hir/mod.rs
@@ -949,8 +949,10 @@ pub enum Expr_ {
     /// A `match` block, with a source that indicates whether or not it is
     /// the result of a desugaring, and if so, which kind.
     ExprMatch(P<Expr>, HirVec<Arm>, MatchSource),
-    /// A closure (for example, `move |a, b, c| {a + b + c}`)
-    ExprClosure(CaptureClause, P<FnDecl>, P<Block>),
+    /// A closure (for example, `move |a, b, c| {a + b + c}`).
+    ///
+    /// The final span is the span of the argument block `|...|`.
+    ExprClosure(CaptureClause, P<FnDecl>, P<Block>, Span),

     /// A block (`{ ... }`)
     ExprBlock(P<Block>),
 
diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs
index e595c619e859b..db179c1cc1bf6 100644
--- a/src/librustc/hir/print.rs
+++ b/src/librustc/hir/print.rs
@@ -14,9 +14,8 @@ use syntax::abi::Abi;
 use syntax::ast;
 use syntax::codemap::{self, CodeMap, BytePos, Spanned};
 use syntax::errors;
-use syntax::parse::token::{self, BinOpToken};
+use syntax::parse::token::{self, keywords, BinOpToken};
 use syntax::parse::lexer::comments;
-use syntax::parse;
 use syntax::print::pp::{self, break_offset, word, space, hardbreak};
 use syntax::print::pp::{Breaks, eof};
 use syntax::print::pp::Breaks::{Consistent, Inconsistent};
@@ -1392,7 +1391,7 @@ impl<'a> State<'a> {
                 }
                 self.bclose_(expr.span, indent_unit)?;
             }
-            hir::ExprClosure(capture_clause, ref decl, ref body) => {
+            hir::ExprClosure(capture_clause, ref decl, ref body, _fn_decl_span) => {
                 self.print_capture_clause(capture_clause)?;
 
                 self.print_fn_block_args(&decl)?;
@@ -2209,9 +2208,8 @@ impl<'a> State<'a> {
             hir::TyInfer if is_closure => self.print_pat(&input.pat)?,
             _ => {
                 match input.pat.node {
-                    PatKind::Ident(_, ref path1, _) if
-                        path1.node.name ==
-                            parse::token::special_idents::invalid.name => {
+                    PatKind::Ident(_, ref path1, _)
+                            if path1.node.name == keywords::Invalid.name() => {
                         // Do nothing.
                     }
                     _ => {
diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs
index c4d6f100671b0..002f202796cef 100644
--- a/src/librustc/middle/expr_use_visitor.rs
+++ b/src/librustc/middle/expr_use_visitor.rs
@@ -537,8 +537,8 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
                 self.consume_expr(&count);
             }
 
-            hir::ExprClosure(..) => {
-                self.walk_captures(expr)
+            hir::ExprClosure(_, _, _, fn_decl_span) => {
+                self.walk_captures(expr, fn_decl_span)
             }
 
             hir::ExprBox(ref base) => {
@@ -1142,7 +1142,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
         }));
     }
 
-    fn walk_captures(&mut self, closure_expr: &hir::Expr) {
+    fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) {
         debug!("walk_captures({:?})", closure_expr);
 
         self.tcx().with_freevars(closure_expr.id, |freevars| {
@@ -1152,7 +1152,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
                                              closure_expr_id: closure_expr.id };
                 let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap();
                 let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id,
-                                                                   closure_expr.span,
+                                                                   fn_decl_span,
                                                                    freevar.def));
                 match upvar_capture {
                     ty::UpvarCapture::ByValue => {
@@ -1161,7 +1161,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> {
                     }
                     ty::UpvarCapture::ByRef(upvar_borrow) => {
                         self.delegate.borrow(closure_expr.id,
-                                             closure_expr.span,
+                                             fn_decl_span,
                                              cmt_var,
                                              upvar_borrow.region,
                                              upvar_borrow.kind,
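The reason `fn_decl_span` is threaded into `walk_captures` is that capture and borrow diagnostics now carry the span of the closure head rather than the span of the entire closure expression. Roughly (a sketch, not code from the patch):

    // let add = |x, y| { x + y + captured };
    //           ^^^^^^ the new Span stored in ExprClosure covers only the
    //                  `|...|` argument block, so notes about captured
    //                  variables point here instead of at the whole body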
diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs
index 35991ae56c8a1..b9b182fd4d0e3 100644
--- a/src/librustc/middle/liveness.rs
+++ b/src/librustc/middle/liveness.rs
@@ -125,7 +125,7 @@ use std::io;
 use std::rc::Rc;
 use syntax::ast::{self, NodeId};
 use syntax::codemap::{BytePos, original_sp, Span};
-use syntax::parse::token::special_idents;
+use syntax::parse::token::keywords;
 use syntax::ptr::P;
 
 use hir::Expr;
@@ -948,7 +948,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
               self.propagate_through_expr(&e, succ)
           }
 
-          hir::ExprClosure(_, _, ref blk) => {
+          hir::ExprClosure(_, _, ref blk, _) => {
               debug!("{} is an ExprClosure",
                      expr_to_string(expr));
 
@@ -1578,7 +1578,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
                 let var = self.variable(p_id, sp);
                 // Ignore unused self.
                 let name = path1.node;
-                if name != special_idents::self_.name {
+                if name != keywords::SelfValue.name() {
                     if !self.warn_about_unused(sp, p_id, entry_ln, var) {
                         if self.live_on_entry(entry_ln, var).is_none() {
                             self.report_dead_assign(p_id, sp, var, true);
diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs
index 2f77552c389ad..7b31cd815677e 100644
--- a/src/librustc/middle/mem_categorization.rs
+++ b/src/librustc/middle/mem_categorization.rs
@@ -728,7 +728,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> {
             };
 
             match fn_expr.node {
-                hir::ExprClosure(_, _, ref body) => body.id,
+                hir::ExprClosure(_, _, ref body, _) => body.id,
                 _ => bug!()
             }
         };
diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs
index 585b65b9f5e42..23eb5a56c8439 100644
--- a/src/librustc/middle/resolve_lifetime.rs
+++ b/src/librustc/middle/resolve_lifetime.rs
@@ -29,7 +29,7 @@ use std::fmt;
 use std::mem::replace;
 use syntax::ast;
 use syntax::codemap::Span;
-use syntax::parse::token::special_idents;
+use syntax::parse::token::keywords;
 use util::nodemap::NodeMap;
 
 use hir;
@@ -245,7 +245,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> {
     }
 
     fn visit_lifetime(&mut self, lifetime_ref: &hir::Lifetime) {
-        if lifetime_ref.name == special_idents::static_lifetime.name {
+        if lifetime_ref.name == keywords::StaticLifetime.name() {
             self.insert_lifetime(lifetime_ref, DefStaticRegion);
             return;
         }
@@ -672,9 +672,8 @@ impl<'a> LifetimeContext<'a> {
         for i in 0..lifetimes.len() {
             let lifetime_i = &lifetimes[i];
 
-            let special_idents = [special_idents::static_lifetime];
             for lifetime in lifetimes {
-                if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) {
+                if lifetime.lifetime.name == keywords::StaticLifetime.name() {
                     span_err!(self.sess, lifetime.lifetime.span, E0262,
                         "invalid lifetime parameter name: `{}`", lifetime.lifetime.name);
                 }
diff --git a/src/librustc/mir/repr.rs b/src/librustc/mir/repr.rs
index 09383e69553be..9ec05a9b2927c 100644
--- a/src/librustc/mir/repr.rs
+++ b/src/librustc/mir/repr.rs
@@ -200,7 +200,7 @@ pub struct ArgDecl<'tcx> {
     /// and has to be collected from multiple actual arguments.
     pub spread: bool,
 
-    /// Either special_idents::invalid or the name of a single-binding
+    /// Either keywords::Invalid or the name of a single-binding
     /// pattern associated with this argument. Useful for debuginfo.
     pub debug_name: Name
 }
diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs
index 10000607b5409..5bde6df5123d6 100644
--- a/src/librustc/ty/context.rs
+++ b/src/librustc/ty/context.rs
@@ -44,7 +44,7 @@ use std::hash::{Hash, Hasher};
 use std::rc::Rc;
 use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
-use syntax::parse::token::{self, special_idents};
+use syntax::parse::token::{self, keywords};
 
 use hir;
 
@@ -1069,7 +1069,7 @@ impl<'tcx> TyCtxt<'tcx> {
     }
 
     pub fn mk_self_type(&self) -> Ty<'tcx> {
-        self.mk_param(subst::SelfSpace, 0, special_idents::type_self.name)
+        self.mk_param(subst::SelfSpace, 0, keywords::SelfType.name())
     }
 
     pub fn mk_param_from_def(&self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs
index c0fb60d4dd3dc..709ec051ddcd6 100644
--- a/src/librustc/ty/sty.rs
+++ b/src/librustc/ty/sty.rs
@@ -24,7 +24,7 @@ use std::ops;
 use std::mem;
 use syntax::abi;
 use syntax::ast::{self, Name};
-use syntax::parse::token::special_idents;
+use syntax::parse::token::keywords;
 
 use serialize::{Decodable, Decoder};
 
@@ -533,7 +533,7 @@ impl ParamTy {
     }
 
     pub fn for_self() -> ParamTy {
-        ParamTy::new(subst::SelfSpace, 0, special_idents::type_self.name)
+        ParamTy::new(subst::SelfSpace, 0, keywords::SelfType.name())
     }
 
     pub fn for_def(def: &ty::TypeParameterDef) -> ParamTy {
diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs
index d7c928b8d6e1f..15db356b1ba95 100644
--- a/src/librustc_borrowck/borrowck/mod.rs
+++ b/src/librustc_borrowck/borrowck/mod.rs
@@ -415,7 +415,7 @@ pub fn closure_to_block(closure_id: ast::NodeId,
                         tcx: &TyCtxt) -> ast::NodeId {
     match tcx.map.get(closure_id) {
         hir_map::NodeExpr(expr) => match expr.node {
-            hir::ExprClosure(_, _, ref block) => {
+            hir::ExprClosure(_, _, ref block, _) => {
                 block.id
             }
             _ => {
diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index b1f35541134f0..b088425d58a2a 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -18,7 +18,7 @@ use rustc::hir::pat_util::pat_is_binding;
 use std::ops::{Index, IndexMut};
 use syntax::ast;
 use syntax::codemap::Span;
-use syntax::parse::token;
+use syntax::parse::token::keywords;
 
 pub struct Builder<'a, 'tcx: 'a> {
     hir: Cx<'a, 'tcx>,
@@ -238,7 +238,7 @@ pub fn construct<'a,'tcx>(hir: Cx<'a,'tcx>,
                 ty::UpvarCapture::ByRef(..) => true
             });
             let mut decl = UpvarDecl {
-                debug_name: token::special_idents::invalid.name,
+                debug_name: keywords::Invalid.name(),
                 by_ref: by_ref
             };
             if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(fv.def.var_id()) {
@@ -296,7 +296,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
                 self.schedule_drop(pattern.as_ref().map_or(ast_block.span, |pat| pat.span),
                                    argument_extent, &lvalue, ty);
 
-                let mut name = token::special_idents::invalid.name;
+                let mut name = keywords::Invalid.name();
                 if let Some(pat) = pattern {
                     if let hir::PatKind::Ident(_, ref ident, _) = pat.node {
                         if pat_is_binding(&self.hir.tcx().def_map.borrow(), pat) {
diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs
index 12dcb32da3fcd..7dab8c4c5fb2a 100644
--- a/src/librustc_mir/hair/cx/expr.rs
+++ b/src/librustc_mir/hair/cx/expr.rs
@@ -725,7 +725,7 @@ fn convert_var<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>,
             let body_id = match cx.tcx.map.find(closure_expr_id) {
                 Some(map::NodeExpr(expr)) => {
                     match expr.node {
-                        hir::ExprClosure(_, _, ref body) => body.id,
+                        hir::ExprClosure(_, _, ref body, _) => body.id,
                         _ => {
                             span_bug!(expr.span, "closure expr is not a closure expr");
                         }
diff --git a/src/librustc_passes/loops.rs b/src/librustc_passes/loops.rs
index 9a58a704c52cc..2174d1cf9b82a 100644
--- a/src/librustc_passes/loops.rs
+++ b/src/librustc_passes/loops.rs
@@ -48,7 +48,7 @@ impl<'a, 'v> Visitor<'v> for CheckLoopVisitor<'a> {
             hir::ExprLoop(ref b, _) => {
                 self.with_context(Loop, |v| v.visit_block(&b));
             }
-            hir::ExprClosure(_, _, ref b) => {
+            hir::ExprClosure(_, _, ref b, _) => {
                 self.with_context(Closure, |v| v.visit_block(&b));
             }
             hir::ExprBreak(_) => self.require_loop("break", e.span),
diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs
index effc751c50759..ed473da19176b 100644
--- a/src/librustc_resolve/build_reduced_graph.rs
+++ b/src/librustc_resolve/build_reduced_graph.rs
@@ -112,15 +112,14 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
             !segment.parameters.bindings().is_empty()
         });
         if found_param {
-            self.session.span_err(path.span,
-                                  "type or lifetime parameter is found in import path");
+            self.session.span_err(path.span, "type or lifetime parameters in import path");
         }
 
         // Checking for special identifiers in path
         // prevent `self` or `super` at beginning of global path
         if path.global && path.segments.len() > 0 {
             let first = path.segments[0].identifier.name;
-            if first == keywords::Super.to_name() || first == keywords::SelfValue.to_name() {
+            if first == keywords::Super.name() || first == keywords::SelfValue.name() {
                 self.session.add_lint(
                     lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH, id, path.span,
                     format!("expected identifier, found keyword `{}`", first)
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index 8326b8b95e996..293b4de71fac4 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -62,7 +62,7 @@ use syntax::ast::{CRATE_NODE_ID, Name, NodeId, CrateNum, IntTy, UintTy};
 use syntax::attr::AttrMetaMethods;
 use syntax::codemap::{self, Span, Pos};
 use syntax::errors::DiagnosticBuilder;
-use syntax::parse::token::{self, special_names, special_idents};
+use syntax::parse::token::{self, keywords};
 use syntax::util::lev_distance::find_best_match_for_name;
 
 use rustc::hir::intravisit::{self, FnKind, Visitor};
@@ -1954,8 +1954,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
         let mut self_type_rib = Rib::new(NormalRibKind);
 
         // plain insert (no renaming, types are not currently hygienic....)
-        let name = special_names::type_self;
-        self_type_rib.bindings.insert(name, self_def);
+        self_type_rib.bindings.insert(keywords::SelfType.name(), self_def);
         self.type_ribs.push(self_type_rib);
         f(self);
         if !self.resolved {
@@ -2195,11 +2194,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
                             "type name"
                         };
 
-                        let self_type_name = special_idents::type_self.name;
                         let is_invalid_self_type_name = path.segments.len() > 0 &&
                                                         maybe_qself.is_none() &&
                                                         path.segments[0].identifier.name ==
-                                                        self_type_name;
+                                                        keywords::SelfType.name();
                         if is_invalid_self_type_name {
                             resolve_error(self,
                                           ty.span,
@@ -2643,7 +2641,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
                           namespace: Namespace,
                           record_used: bool)
                           -> Option<LocalDef> {
-        if identifier.name == special_idents::invalid.name {
+        if identifier.unhygienic_name == keywords::Invalid.name() {
             return Some(LocalDef::from_def(Def::Err));
         }
 
@@ -3074,7 +3072,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
                                 false // Stop advancing
                             });
 
-                            if method_scope && special_names::self_.as_str() == &path_name[..] {
+                            if method_scope &&
+                                    &path_name[..] == keywords::SelfValue.name().as_str() {
                                 resolve_error(self,
                                               expr.span,
                                               ResolutionError::SelfNotAvailableInStaticMethod);
@@ -3612,7 +3611,7 @@ fn module_to_string(module: Module) -> String {
             }
             BlockParentLink(ref module, _) => {
                 // danger, shouldn't be ident?
-                names.push(special_idents::opaque.name);
+                names.push(token::intern("<opaque>"));
                 collect_mod(names, module);
             }
         }
diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs
index bf6ad7039636e..4ba66c18e6eba 100644
--- a/src/librustc_save_analysis/dump_visitor.rs
+++ b/src/librustc_save_analysis/dump_visitor.rs
@@ -1011,7 +1011,7 @@ impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx,
                                 span: sub_span.expect("No span found for use"),
                                 id: item.id,
                                 mod_id: mod_id,
-                                name: ident.name.to_string(),
+                                name: ident.to_string(),
                                 scope: self.cur_scope
                             }.normalize(&self.tcx));
                         }
@@ -1075,7 +1075,7 @@ impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx,
                 if !self.span.filter_generated(alias_span, item.span) {
                     self.dumper.extern_crate(item.span, ExternCrateData {
                         id: item.id,
-                        name: item.ident.name.to_string(),
+                        name: item.ident.to_string(),
                         crate_num: cnum,
                         location: location,
                         span: alias_span.expect("No span found for extern crate"),
@@ -1258,7 +1258,7 @@ impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx,
                                    ty),
                 }
             }
-            ast::ExprKind::Closure(_, ref decl, ref body) => {
+            ast::ExprKind::Closure(_, ref decl, ref body, _fn_decl_span) => {
                 let mut id = String::from("$");
                 id.push_str(&ex.id.to_string());
                 self.process_formals(&decl.inputs, &id);
diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs
index cea67f46db527..9190389b722b6 100644
--- a/src/librustc_trans/base.rs
+++ b/src/librustc_trans/base.rs
@@ -1339,7 +1339,7 @@ fn build_cfg(tcx: &TyCtxt, id: ast::NodeId) -> (ast::NodeId, Option<cfg::CFG>) {
         }
         Some(hir_map::NodeExpr(e)) => {
             match e.node {
-                hir::ExprClosure(_, _, ref blk) => blk,
+                hir::ExprClosure(_, _, ref blk, _) => blk,
                 _ => bug!("unexpected expr variant in has_nested_returns"),
             }
         }
diff --git a/src/librustc_trans/consts.rs b/src/librustc_trans/consts.rs
index 89f3b295c8d22..b9af0bbe3d123 100644
--- a/src/librustc_trans/consts.rs
+++ b/src/librustc_trans/consts.rs
@@ -990,7 +990,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 None => C_nil(cx),
             }
         },
-        hir::ExprClosure(_, ref decl, ref body) => {
+        hir::ExprClosure(_, ref decl, ref body, _) => {
             match ety.sty {
                 ty::TyClosure(def_id, ref substs) => {
                     closure::trans_closure_expr(closure::Dest::Ignore(cx),
diff --git a/src/librustc_trans/debuginfo/create_scope_map.rs b/src/librustc_trans/debuginfo/create_scope_map.rs
index 4b1292e4086f7..3a8974c2aca03 100644
--- a/src/librustc_trans/debuginfo/create_scope_map.rs
+++ b/src/librustc_trans/debuginfo/create_scope_map.rs
@@ -479,7 +479,7 @@ fn walk_expr(cx: &CrateContext,
             })
         }
 
-        hir::ExprClosure(_, ref decl, ref block) => {
+        hir::ExprClosure(_, ref decl, ref block, _) => {
             with_new_scope(cx,
                            block.span,
                            scope_stack,
diff --git a/src/librustc_trans/expr.rs b/src/librustc_trans/expr.rs
index 6955d51ceccaf..cd11ca586890c 100644
--- a/src/librustc_trans/expr.rs
+++ b/src/librustc_trans/expr.rs
@@ -1118,7 +1118,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         hir::ExprVec(..) | hir::ExprRepeat(..) => {
             tvec::trans_fixed_vstore(bcx, expr, dest)
         }
-        hir::ExprClosure(_, ref decl, ref body) => {
+        hir::ExprClosure(_, ref decl, ref body, _) => {
             let dest = match dest {
                 SaveIn(lldest) => closure::Dest::SaveIn(bcx, lldest),
                 Ignore => closure::Dest::Ignore(bcx.ccx())
diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs
index 3874ebc91307b..7ec3c9345be58 100644
--- a/src/librustc_trans/mir/mod.rs
+++ b/src/librustc_trans/mir/mod.rs
@@ -22,7 +22,7 @@ use machine;
 use type_of;
 
 use syntax::codemap::DUMMY_SP;
-use syntax::parse::token;
+use syntax::parse::token::keywords;
 
 use std::ops::Deref;
 use std::rc::Rc;
@@ -286,7 +286,7 @@ fn arg_value_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>,
                         alloca: lltemp,
                         address_operations: &ops
                     };
-                    declare_local(bcx, token::special_idents::invalid.name,
+                    declare_local(bcx, keywords::Invalid.name(),
                                   tupled_arg_ty, scope, variable_access,
                                   VariableKind::ArgumentVariable(arg_index + i + 1),
                                   bcx.fcx().span.unwrap_or(DUMMY_SP));
diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs
index 674c3d6f9a17a..d6e64ccd259e6 100644
--- a/src/librustc_typeck/astconv.rs
+++ b/src/librustc_typeck/astconv.rs
@@ -73,7 +73,7 @@ use syntax::{abi, ast};
 use syntax::codemap::{Span, Pos};
 use syntax::errors::DiagnosticBuilder;
 use syntax::feature_gate::{GateIssue, emit_feature_err};
-use syntax::parse::token;
+use syntax::parse::token::{self, keywords};
 
 use rustc::hir::print as pprust;
 use rustc::hir;
@@ -1313,7 +1313,7 @@ fn associated_path_def_to_ty<'tcx>(this: &AstConv<'tcx>,
             let trait_node_id = tcx.map.as_local_node_id(trait_did).unwrap();
             match find_bound_for_assoc_item(this,
                                             trait_node_id,
-                                            token::special_idents::type_self.name,
+                                            keywords::SelfType.name(),
                                             assoc_name,
                                             span) {
                 Ok(bound) => bound,
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index 67b91f7838c66..57f56530009eb 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -120,7 +120,7 @@ use syntax::attr;
 use syntax::attr::AttrMetaMethods;
 use syntax::codemap::{self, Span, Spanned};
 use syntax::errors::DiagnosticBuilder;
-use syntax::parse::token::{self, InternedString, special_idents};
+use syntax::parse::token::{self, InternedString, keywords};
 use syntax::ptr::P;
 use syntax::util::lev_distance::find_best_match_for_name;
 
@@ -2851,7 +2851,7 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                 method_ty
             }
             Err(error) => {
-                if method_name.node != special_idents::invalid.name {
+                if method_name.node != keywords::Invalid.name() {
                     method::report_error(fcx, method_name.span, expr_t,
                                          method_name.node, Some(rcvr), error);
                 }
@@ -2990,7 +2990,7 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
             let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path);
             fcx.tcx().sess.span_err(expr.span, &msg);
             fcx.write_ty(expr.id, field_ty);
-        } else if field.node == special_idents::invalid.name {
+        } else if field.node == keywords::Invalid.name() {
             fcx.write_error(expr.id);
         } else if method::exists(fcx, field.span, field.node, expr_t, expr.id) {
             fcx.type_error_struct(field.span,
@@ -3530,7 +3530,7 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
       hir::ExprMatch(ref discrim, ref arms, match_src) => {
         _match::check_match(fcx, expr, &discrim, arms, expected, match_src);
       }
-      hir::ExprClosure(capture, ref decl, ref body) => {
+      hir::ExprClosure(capture, ref decl, ref body, _) => {
           closure::check_expr_closure(fcx, expr, capture, &decl, &body, expected);
       }
       hir::ExprBlock(ref b) => {
@@ -3780,7 +3780,7 @@ pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
                     method::MethodError::PrivateMatch(def) => Some(def),
                     _ => None,
                 };
-                if item_name != special_idents::invalid.name {
+                if item_name != keywords::Invalid.name() {
                     method::report_error(fcx, span, ty, item_name, None, error);
                 }
                 def
diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs
index 2a4de6e091b3d..5efd57c4d1fc9 100644
--- a/src/librustc_typeck/check/regionck.rs
+++ b/src/librustc_typeck/check/regionck.rs
@@ -782,7 +782,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &hir::Expr) {
             intravisit::walk_expr(rcx, expr);
         }
 
-        hir::ExprClosure(_, _, ref body) => {
+        hir::ExprClosure(_, _, ref body, _) => {
             check_expr_fn_block(rcx, expr, &body);
         }
 
diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs
index 40481cda76290..c39e992eb3642 100644
--- a/src/librustc_typeck/check/upvar.rs
+++ b/src/librustc_typeck/check/upvar.rs
@@ -98,7 +98,7 @@ struct SeedBorrowKind<'a,'tcx:'a> {
 impl<'a, 'tcx, 'v> Visitor<'v> for SeedBorrowKind<'a, 'tcx> {
     fn visit_expr(&mut self, expr: &hir::Expr) {
         match expr.node {
-            hir::ExprClosure(cc, _, ref body) => {
+            hir::ExprClosure(cc, _, ref body, _) => {
                 self.check_closure(expr, cc, &body);
             }
 
diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs
index 1b21e6ce9ebe3..3bfd53ceadae8 100644
--- a/src/librustc_typeck/check/wfcheck.rs
+++ b/src/librustc_typeck/check/wfcheck.rs
@@ -24,7 +24,7 @@ use std::collections::HashSet;
 use syntax::ast;
 use syntax::codemap::{Span};
 use syntax::errors::DiagnosticBuilder;
-use syntax::parse::token::{special_idents};
+use syntax::parse::token::keywords;
 use rustc::hir::intravisit::{self, Visitor};
 use rustc::hir;
 
@@ -472,7 +472,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
     {
         let name = match space {
             TypeSpace => ast_generics.ty_params[index].name,
-            SelfSpace => special_idents::type_self.name,
+            SelfSpace => keywords::SelfType.name(),
             FnSpace => bug!("Fn space occupied?"),
         };
 
diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs
index 68327ccd39ab6..ad78740921cbc 100644
--- a/src/librustc_typeck/check/writeback.rs
+++ b/src/librustc_typeck/check/writeback.rs
@@ -156,7 +156,7 @@ impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> {
         self.visit_method_map_entry(ResolvingExpr(e.span),
                                     MethodCall::expr(e.id));
 
-        if let hir::ExprClosure(_, ref decl, _) = e.node {
+        if let hir::ExprClosure(_, ref decl, _, _) = e.node {
             for input in &decl.inputs {
                 self.visit_node_id(ResolvingExpr(e.span), input.id);
             }
diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs
index 6d95586bed019..4a6f7a6a2ef48 100644
--- a/src/librustc_typeck/collect.rs
+++ b/src/librustc_typeck/collect.rs
@@ -93,7 +93,7 @@ use syntax::abi;
 use syntax::ast;
 use syntax::attr;
 use syntax::codemap::Span;
-use syntax::parse::token::special_idents;
+use syntax::parse::token::keywords;
 use syntax::ptr::P;
 use rustc::hir::{self, PatKind};
 use rustc::hir::intravisit;
@@ -1655,7 +1655,7 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
     let def = ty::TypeParameterDef {
         space: SelfSpace,
         index: 0,
-        name: special_idents::type_self.name,
+        name: keywords::SelfType.name(),
         def_id: ccx.tcx.map.local_def_id(param_id),
         default_def_id: ccx.tcx.map.local_def_id(parent),
         default: None,
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index c9f40a1adae32..da792b363f0a5 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -31,7 +31,7 @@ use syntax::attr;
 use syntax::attr::{AttributeMethods, AttrMetaMethods};
 use syntax::codemap;
 use syntax::codemap::{DUMMY_SP, Pos, Spanned};
-use syntax::parse::token::{self, InternedString, special_idents};
+use syntax::parse::token::{self, InternedString, keywords};
 use syntax::ptr::P;
 
 use rustc_trans::back::link;
@@ -2666,7 +2666,7 @@ fn resolve_type(cx: &DocContext,
             hir::TyFloat(ast::FloatTy::F64) => return Primitive(F64),
         },
         Def::SelfTy(..) if path.segments.len() == 1 => {
-            return Generic(special_idents::type_self.name.to_string());
+            return Generic(keywords::SelfType.name().to_string());
         }
         Def::SelfTy(..) | Def::TyParam(..) => true,
         _ => false,
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 7ca4703a2e185..2f0ae540fbf07 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -147,7 +147,7 @@ fn write_source(sess: &parse::ParseSess,
             }
 
             // keywords are also included in the identifier set
-            token::Ident(ident, _is_mod_sep) => {
+            token::Ident(ident) => {
                 match &*ident.name.as_str() {
                     "ref" | "mut" => "kw-2",
 
diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs
index c20270e830665..70b3edac5c5dc 100644
--- a/src/libstd/collections/hash/map.rs
+++ b/src/libstd/collections/hash/map.rs
@@ -830,7 +830,7 @@ impl<K, V, S> HashMap<K, V, S>
     /// }
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
+    pub fn keys(&self) -> Keys<K, V> {
         Keys { inner: self.iter() }
     }
 
@@ -852,7 +852,7 @@ impl<K, V, S> HashMap<K, V, S>
     /// }
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn values<'a>(&'a self) -> Values<'a, K, V> {
+    pub fn values(&self) -> Values<K, V> {
         Values { inner: self.iter() }
     }
 
@@ -880,7 +880,7 @@ impl<K, V, S> HashMap<K, V, S>
     /// }
     /// ```
     #[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")]
-    pub fn values_mut<'a>(&'a mut self) -> ValuesMut<'a, K, V> {
+    pub fn values_mut<'a>(&'a mut self) -> ValuesMut<K, V> {
         ValuesMut { inner: self.iter_mut() }
     }
 
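These `HashMap` signature changes are pure lifetime-elision cleanups; the elided forms mean exactly what the explicit ones did, so callers are unaffected. A quick sanity check in ordinary user code (not part of the patch):

    use std::collections::HashMap;

    fn main() {
        let mut map = HashMap::new();
        map.insert("a", 1);
        map.insert("b", 2);

        // keys() still yields &K and values() still yields &V
        let mut ks: Vec<&&str> = map.keys().collect();
        let mut vs: Vec<&i32> = map.values().collect();
        ks.sort();
        vs.sort();
        assert_eq!(ks, [&"a", &"b"]);
        assert_eq!(vs, [&1, &2]);
    }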
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index df9f935446d71..bf1c305e20674 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -93,7 +93,7 @@ impl Ident {
     pub fn new(name: Name, ctxt: SyntaxContext) -> Ident {
         Ident {name: name, ctxt: ctxt}
     }
-    pub fn with_empty_ctxt(name: Name) -> Ident {
+    pub const fn with_empty_ctxt(name: Name) -> Ident {
         Ident {name: name, ctxt: EMPTY_CTXT}
     }
 }
@@ -248,8 +248,8 @@ impl PathParameters {
     pub fn none() -> PathParameters {
         PathParameters::AngleBracketed(AngleBracketedParameterData {
             lifetimes: Vec::new(),
-            types: P::empty(),
-            bindings: P::empty(),
+            types: P::new(),
+            bindings: P::new(),
         })
     }
 
@@ -421,7 +421,7 @@ impl Default for Generics {
     fn default() ->  Generics {
         Generics {
             lifetimes: Vec::new(),
-            ty_params: P::empty(),
+            ty_params: P::new(),
             where_clause: WhereClause {
                 id: DUMMY_NODE_ID,
                 predicates: Vec::new(),
@@ -986,7 +986,9 @@ pub enum ExprKind {
     /// A `match` block.
     Match(P<Expr>, Vec<Arm>),
     /// A closure (for example, `move |a, b, c| {a + b + c}`)
-    Closure(CaptureBy, P<FnDecl>, P<Block>),
+    ///
+    /// The final span is the span of the argument block `|...|`
+    Closure(CaptureBy, P<FnDecl>, P<Block>, Span),
     /// A block (`{ ... }`)
     Block(P<Block>),
 
@@ -1206,8 +1208,7 @@ impl TokenTree {
                 TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
-                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"),
-                                                                token::Plain)),
+                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
                               TokenTree::Token(sp, token::Eq),
                               TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
@@ -1225,14 +1226,13 @@ impl TokenTree {
             }
             (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
                 let v = [TokenTree::Token(sp, token::Dollar),
-                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()),
-                                                  token::Plain))];
+                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
                 v[index].clone()
             }
-            (&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
-                let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)),
+            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
+                let v = [TokenTree::Token(sp, token::SubstNt(name)),
                          TokenTree::Token(sp, token::Colon),
-                         TokenTree::Token(sp, token::Ident(kind, kind_st))];
+                         TokenTree::Token(sp, token::Ident(kind))];
                 v[index].clone()
             }
             (&TokenTree::Sequence(_, ref seq), _) => {
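The `TokenTree` hunks above belong to the same cleanup that drops the old identifier-style flags from the token type; every construction and match loses the extra arguments. The before/after shapes used throughout the rest of this patch:

    // before                                         after
    // token::Ident(ident, token::Plain)              token::Ident(ident)
    // token::MatchNt(name, kind, name_st, kind_st)   token::MatchNt(name, kind)
    // token::SubstNt(name, name_st)                  token::SubstNt(name)
    // token::NtIdent(boxed_spanned_ident, style)     token::NtIdent(boxed_spanned_ident)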
diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs
index 43b4a201afc18..26088b1242e2a 100644
--- a/src/libsyntax/diagnostics/plugin.rs
+++ b/src/libsyntax/diagnostics/plugin.rs
@@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                    token_tree: &[TokenTree])
                                    -> Box<MacResult+'cx> {
     let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
         _ => unreachable!()
     };
 
@@ -92,10 +92,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
         token_tree.get(1),
         token_tree.get(2)
     ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
             (code, None)
         },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
+        (3, Some(&TokenTree::Token(_, token::Ident(ref code))),
             Some(&TokenTree::Token(_, token::Comma)),
             Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
             (code, Some(description))
@@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
     let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(_, token::Ident(ref crate_name)),
             // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
+            &TokenTree::Token(_, token::Ident(ref name))
         ) => (*&crate_name, name),
         _ => unreachable!()
     };
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index a4e5b68277d69..67bce440d4d48 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -13,9 +13,7 @@ use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
 use attr;
 use codemap::{Span, respan, Spanned, DUMMY_SP, Pos};
 use ext::base::ExtCtxt;
-use parse::token::special_idents;
-use parse::token::InternedString;
-use parse::token;
+use parse::token::{self, keywords, InternedString};
 use ptr::P;
 
 // Transitional reexports so qquote can find the paths it is looking for
@@ -194,10 +192,14 @@ pub trait AstBuilder {
                cond: P<ast::Expr>, then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr>;
     fn expr_loop(&self, span: Span, block: P<ast::Block>) -> P<ast::Expr>;
 
-    fn lambda_fn_decl(&self, span: Span,
-                      fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> P<ast::Expr>;
+    fn lambda_fn_decl(&self,
+                      span: Span,
+                      fn_decl: P<ast::FnDecl>,
+                      blk: P<ast::Block>,
+                      fn_decl_span: Span)
+                      -> P<ast::Expr>;
 
-    fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> P<ast::Expr>;
+    fn lambda(&self, span: Span, ids: Vec<ast::Ident>, blk: P<ast::Block>) -> P<ast::Expr>;
     fn lambda0(&self, span: Span, blk: P<ast::Block>) -> P<ast::Expr>;
     fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> P<ast::Expr>;
 
@@ -602,7 +604,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr_path(self.path_ident(span, id))
     }
     fn expr_self(&self, span: Span) -> P<ast::Expr> {
-        self.expr_ident(span, special_idents::self_)
+        self.expr_ident(span, keywords::SelfValue.ident())
     }
 
     fn expr_binary(&self, sp: Span, op: ast::BinOpKind,
@@ -894,17 +896,34 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         self.expr(span, ast::ExprKind::Loop(block, None))
     }
 
-    fn lambda_fn_decl(&self, span: Span,
-                      fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> P<ast::Expr> {
-        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, blk))
+    fn lambda_fn_decl(&self,
+                      span: Span,
+                      fn_decl: P<ast::FnDecl>,
+                      blk: P<ast::Block>,
+                      fn_decl_span: Span) // span of the `|...|` part
+                      -> P<ast::Expr> {
+        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref,
+                                               fn_decl,
+                                               blk,
+                                               fn_decl_span))
     }
-    fn lambda(&self, span: Span, ids: Vec<ast::Ident>, blk: P<ast::Block>) -> P<ast::Expr> {
+
+    fn lambda(&self,
+              span: Span,
+              ids: Vec<ast::Ident>,
+              blk: P<ast::Block>)
+              -> P<ast::Expr> {
         let fn_decl = self.fn_decl(
             ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
             self.ty_infer(span));
 
-        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, blk))
+        // FIXME -- We are using `span` as the span of the `|...|`
+        // part of the lambda, but it may actually correspond to the
+        // entire lambda body. We should probably extend the API here,
+        // but that's not entirely clear.
+        self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, blk, span))
     }
+
     fn lambda0(&self, span: Span, blk: P<ast::Block>) -> P<ast::Expr> {
         self.lambda(span, Vec::new(), blk)
     }
@@ -1132,7 +1151,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
                 vis: ast::Visibility, vp: P<ast::ViewPath>) -> P<ast::Item> {
         P(ast::Item {
             id: ast::DUMMY_NODE_ID,
-            ident: special_idents::invalid,
+            ident: keywords::Invalid.ident(),
             attrs: vec![],
             node: ast::ItemKind::Use(vp),
             vis: vis,
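For syntax-extension authors, the visible change in `AstBuilder` is that `lambda_fn_decl` now takes the span of the generated `|...|` head as a fourth argument. A hedged usage sketch (`cx`, `sp`, `fn_decl`, `block`, and `args_sp` are placeholders for whatever the extension already has in scope):

    // old: cx.lambda_fn_decl(sp, fn_decl, block)
    // new: cx.lambda_fn_decl(sp, fn_decl, block, args_sp)
    //
    // `args_sp` should cover the generated `|...|`; when no better span is
    // available, reusing `sp` mirrors what `lambda` itself does above.
    let closure = cx.lambda_fn_decl(sp, fn_decl, block, args_sp);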
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index cd7b0fcfb0044..a2c8ae898e173 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -25,7 +25,7 @@ use fold;
 use fold::*;
 use util::move_map::MoveMap;
 use parse;
-use parse::token::{fresh_mark, fresh_name, intern};
+use parse::token::{fresh_mark, fresh_name, intern, keywords};
 use ptr::P;
 use util::small_vector::SmallVector;
 use visit;
@@ -149,14 +149,17 @@ pub fn expand_expr(e: P<ast::Expr>, fld: &mut MacroExpander) -> P<ast::Expr> {
             fld.cx.expr(span, il).with_attrs(fold_thin_attrs(attrs, fld))
         }
 
-        ast::ExprKind::Closure(capture_clause, fn_decl, block) => {
+        ast::ExprKind::Closure(capture_clause, fn_decl, block, fn_decl_span) => {
             let (rewritten_fn_decl, rewritten_block)
                 = expand_and_rename_fn_decl_and_block(fn_decl, block, fld);
             let new_node = ast::ExprKind::Closure(capture_clause,
-                                            rewritten_fn_decl,
-                                            rewritten_block);
-            P(ast::Expr{id:id, node: new_node, span: fld.new_span(span),
-                        attrs: fold_thin_attrs(attrs, fld)})
+                                                  rewritten_fn_decl,
+                                                  rewritten_block,
+                                                  fld.new_span(fn_decl_span));
+            P(ast::Expr{ id:id,
+                         node: new_node,
+                         span: fld.new_span(span),
+                         attrs: fold_thin_attrs(attrs, fld) })
         }
 
         _ => {
@@ -380,7 +383,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 
             Some(rc) => match *rc {
                 NormalTT(ref expander, tt_span, allow_internal_unstable) => {
-                    if ident.name != parse::token::special_idents::invalid.name {
+                    if ident.name != keywords::Invalid.name() {
                         fld.cx
                             .span_err(path_span,
                                       &format!("macro {}! expects no ident argument, given '{}'",
@@ -401,7 +404,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                     expander.expand(fld.cx, span, &marked_before[..])
                 }
                 IdentTT(ref expander, tt_span, allow_internal_unstable) => {
-                    if ident.name == parse::token::special_idents::invalid.name {
+                    if ident.name == keywords::Invalid.name() {
                         fld.cx.span_err(path_span,
                                         &format!("macro {}! expects an ident argument",
                                                 extname));
@@ -420,7 +423,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
                     expander.expand(fld.cx, span, ident, marked_tts)
                 }
                 MacroRulesTT => {
-                    if ident.name == parse::token::special_idents::invalid.name {
+                    if ident.name == keywords::Invalid.name() {
                         fld.cx.span_err(path_span, "macro_rules! expects an ident argument");
                         return SmallVector::zero();
                     }
@@ -893,7 +896,7 @@ fn expand_annotatable(a: Annotatable,
             }
             ast::ItemKind::Mod(_) | ast::ItemKind::ForeignMod(_) => {
                 let valid_ident =
-                    it.ident.name != parse::token::special_idents::invalid.name;
+                    it.ident.name != keywords::Invalid.name();
 
                 if valid_ident {
                     fld.cx.mod_push(it.ident);
@@ -1486,7 +1489,7 @@ mod tests {
     use ext::mtwt;
     use fold::Folder;
     use parse;
-    use parse::token;
+    use parse::token::{self, keywords};
     use util::parser_testing::{string_to_parser};
     use util::parser_testing::{string_to_pat, string_to_crate, strs_to_idents};
     use visit;
@@ -1807,7 +1810,7 @@ mod tests {
 
     // run one of the renaming tests
     fn run_renaming_test(t: &RenamingTest, test_idx: usize) {
-        let invalid_name = token::special_idents::invalid.name;
+        let invalid_name = keywords::Invalid.name();
         let (teststr, bound_connections, bound_ident_check) = match *t {
             (ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic)
         };
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 77aeaf8459aec..ee9a197ce56cc 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -13,7 +13,7 @@ use codemap::Span;
 use ext::base::ExtCtxt;
 use ext::base;
 use ext::build::AstBuilder;
-use parse::parser::{Parser, PathParsingMode};
+use parse::parser::{Parser, PathStyle};
 use parse::token::*;
 use parse::token;
 use ptr::P;
@@ -72,7 +72,7 @@ pub mod rt {
 
     impl ToTokens for ast::Ident {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))]
+            vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
         }
     }
 
@@ -401,7 +401,7 @@ pub fn parse_meta_item_panic(parser: &mut Parser) -> P<ast::MetaItem> {
     panictry!(parser.parse_meta_item())
 }
 
-pub fn parse_path_panic(parser: &mut Parser, mode: PathParsingMode) -> ast::Path {
+pub fn parse_path_panic(parser: &mut Parser, mode: PathStyle) -> ast::Path {
     panictry!(parser.parse_path(mode))
 }
 
@@ -500,7 +500,7 @@ pub fn expand_quote_path(cx: &mut ExtCtxt,
                         sp: Span,
                         tts: &[TokenTree])
                         -> Box<base::MacResult+'static> {
-    let mode = mk_parser_path(cx, sp, "LifetimeAndTypesWithoutColons");
+    let mode = mk_parser_path(cx, sp, &["PathStyle", "Type"]);
     let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec!(mode), tts);
     base::MacEager::expr(expanded)
 }
@@ -557,8 +557,9 @@ fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
     cx.expr_path(cx.path_global(sp, idents))
 }
 
-fn mk_parser_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
-    let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("parser"), id_ext(name));
+fn mk_parser_path(cx: &ExtCtxt, sp: Span, names: &[&str]) -> P<ast::Expr> {
+    let mut idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("parser")];
+    idents.extend(names.iter().cloned().map(id_ext));
     cx.expr_path(cx.path_global(sp, idents))
 }
 
@@ -646,14 +647,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                            cx.expr_usize(sp, n))
         }
 
-        token::Ident(ident, style) => {
+        token::Ident(ident) => {
             return cx.expr_call(sp,
                                 mk_token_path(cx, sp, "Ident"),
-                                vec![mk_ident(cx, sp, ident),
-                                     match style {
-                                        ModName => mk_token_path(cx, sp, "ModName"),
-                                        Plain   => mk_token_path(cx, sp, "Plain"),
-                                     }]);
+                                vec![mk_ident(cx, sp, ident)]);
         }
 
         token::Lifetime(ident) => {
@@ -668,19 +665,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                                 vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
         }
 
-        token::MatchNt(name, kind, namep, kindp) => {
+        token::MatchNt(name, kind) => {
             return cx.expr_call(sp,
                                 mk_token_path(cx, sp, "MatchNt"),
-                                vec!(mk_ident(cx, sp, name),
-                                     mk_ident(cx, sp, kind),
-                                     match namep {
-                                        ModName => mk_token_path(cx, sp, "ModName"),
-                                        Plain   => mk_token_path(cx, sp, "Plain"),
-                                     },
-                                     match kindp {
-                                        ModName => mk_token_path(cx, sp, "ModName"),
-                                        Plain   => mk_token_path(cx, sp, "Plain"),
-                                     }));
+                                vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
         }
 
         token::Interpolated(_) => panic!("quote! with interpolated token"),
@@ -722,7 +710,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
 
 fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
     match *tt {
-        TokenTree::Token(sp, SubstNt(ident, _)) => {
+        TokenTree::Token(sp, SubstNt(ident)) => {
             // tt.extend($ident.to_tokens(ext_cx))
 
             let e_to_toks =
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 4e4c644776a51..89ecf02ee4c92 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -85,7 +85,7 @@ use codemap;
 use errors::FatalError;
 use parse::lexer::*; //resolve bug?
 use parse::ParseSess;
-use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
+use parse::parser::{PathStyle, Parser};
 use parse::token::{DocComment, MatchNt, SubstNt};
 use parse::token::{Token, Nonterminal};
 use parse::token;
@@ -216,7 +216,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                     n_rec(p_s, next_m, res, ret_val, idx)?;
                 }
             }
-            TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => {
+            TokenTree::Token(sp, MatchNt(bind_name, _)) => {
                 match ret_val.entry(bind_name.name) {
                     Vacant(spot) => {
                         spot.insert(res[*idx].clone());
@@ -263,7 +263,7 @@ pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
 /// unhygienic comparison)
 pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
     match (t1,t2) {
-        (&token::Ident(id1,_),&token::Ident(id2,_))
+        (&token::Ident(id1),&token::Ident(id2))
         | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
             id1.name == id2.name,
         _ => *t1 == *t2
@@ -451,7 +451,7 @@ pub fn parse(sess: &ParseSess,
             if (!bb_eis.is_empty() && !next_eis.is_empty())
                 || bb_eis.len() > 1 {
                 let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
-                    TokenTree::Token(_, MatchNt(bind, name, _, _)) => {
+                    TokenTree::Token(_, MatchNt(bind, name)) => {
                         format!("{} ('{}')", name, bind)
                     }
                     _ => panic!()
@@ -479,7 +479,7 @@ pub fn parse(sess: &ParseSess,
 
                 let mut ei = bb_eis.pop().unwrap();
                 match ei.top_elts.get_tt(ei.idx) {
-                    TokenTree::Token(span, MatchNt(_, ident, _, _)) => {
+                    TokenTree::Token(span, MatchNt(_, ident)) => {
                         let match_cur = ei.match_cur;
                         (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
                             parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
@@ -534,9 +534,9 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
         "ty" => token::NtTy(panictry!(p.parse_ty())),
         // this could be handled like a token, since it is one
         "ident" => match p.token {
-            token::Ident(sn,b) => {
+            token::Ident(sn) => {
                 p.bump();
-                token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}),b)
+                token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
             }
             _ => {
                 let token_str = pprust::token_to_string(&p.token);
@@ -546,7 +546,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
             }
         },
         "path" => {
-            token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons))))
+            token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type))))
         },
         "meta" => token::NtMeta(panictry!(p.parse_meta_item())),
         _ => {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 87ab3dad50c70..41d3991aee809 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -17,7 +17,7 @@ use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use ext::tt::macro_parser::parse;
 use parse::lexer::new_tt_reader;
 use parse::parser::{Parser, Restrictions};
-use parse::token::{self, special_idents, gensym_ident, NtTT, Token};
+use parse::token::{self, gensym_ident, NtTT, Token};
 use parse::token::Token::*;
 use print;
 use ptr::P;
@@ -244,8 +244,8 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
-    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
-    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
+    let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
+    let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
     let argument_gram = vec!(
         TokenTree::Sequence(DUMMY_SP,
                    Rc::new(ast::SequenceRepetition {
@@ -415,7 +415,7 @@ fn check_matcher_old<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token, on_fai
     let mut tokens = matcher.peekable();
     while let Some(token) = tokens.next() {
         last = match *token {
-            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
+            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec)) => {
                 // ii. If T is a simple NT, look ahead to the next token T' in
                 // M. If T' is in the set FOLLOW(NT), continue. Else; reject.
                 if can_be_followed_by_any(&frag_spec.name.as_str()) {
@@ -881,7 +881,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
         // Now `last` holds the complete set of NT tokens that could
         // end the sequence before SUFFIX. Check that every one works with `suffix`.
         'each_last: for &(_sp, ref t) in &last.tokens {
-            if let MatchNt(ref name, ref frag_spec, _, _) = *t {
+            if let MatchNt(ref name, ref frag_spec) = *t {
                 for &(sp, ref next_token) in &suffix_first.tokens {
                     match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
                         Err(msg) => {
@@ -917,9 +917,8 @@ fn check_matcher_core(cx: &mut ExtCtxt,
     last
 }
 
-
 fn token_can_be_followed_by_any(tok: &Token) -> bool {
-    if let &MatchNt(_, ref frag_spec, _, _) = tok {
+    if let &MatchNt(_, ref frag_spec) = tok {
         frag_can_be_followed_by_any(&frag_spec.name.as_str())
     } else {
         // (Non-NTs can always be followed by anything in matchers.)
@@ -1005,8 +1004,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
             "pat" => {
                 match *tok {
                     FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
-                    Ident(i, _) if (i.name.as_str() == "if" ||
-                                    i.name.as_str() == "in") => Ok(true),
+                    Ident(i) if (i.name.as_str() == "if" ||
+                                 i.name.as_str() == "in") => Ok(true),
                     _ => Ok(false)
                 }
             },
@@ -1014,9 +1013,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
                 match *tok {
                     OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
                     Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                    MatchNt(_, ref frag, _, _) if frag.name.as_str() == "block" => Ok(true),
-                    Ident(i, _) if (i.name.as_str() == "as" ||
-                                    i.name.as_str() == "where") => Ok(true),
+                    MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true),
+                    Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true),
                     _ => Ok(false)
                 }
             },
@@ -1036,7 +1034,7 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
 
 fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
-    if let &MatchNt(_, ref frag_spec, _, _) = tok {
+    if let &MatchNt(_, ref frag_spec) = tok {
         let s = &frag_spec.name.as_str();
         if !is_legal_fragment_specifier(s) {
             return Err(s.to_string());
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index ae99fe817395f..7f53d0f412cca 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -161,7 +161,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
+        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
             match lookup_cur_matched(r, name) {
                 Some(matched) => match *matched {
                     MatchedNonterminal(_) => LisUnconstrained,
@@ -186,7 +186,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             None => (),
             Some(sp) => {
                 r.cur_span = sp;
-                r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
+                r.cur_tok = token::Ident(r.imported_from.unwrap());
                 return ret_val;
             },
         }
@@ -278,12 +278,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
             }
             // FIXME #2887: think about span stuff here
-            TokenTree::Token(sp, SubstNt(ident, namep)) => {
+            TokenTree::Token(sp, SubstNt(ident)) => {
                 r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
                         r.cur_span = sp;
-                        r.cur_tok = SubstNt(ident, namep);
+                        r.cur_tok = SubstNt(ident);
                         return ret_val;
                         // this can't be 0 length, just like TokenTree::Delimited
                     }
@@ -292,9 +292,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
-                            MatchedNonterminal(NtIdent(ref sn, b)) => {
+                            MatchedNonterminal(NtIdent(ref sn)) => {
                                 r.cur_span = sn.span;
-                                r.cur_tok = token::Ident(sn.node, b);
+                                r.cur_tok = token::Ident(sn.node);
                                 return ret_val;
                             }
                             MatchedNonterminal(ref other_whole_nt) => {
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index d8352430eb94e..c77671d89f88f 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -1168,7 +1168,19 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
     fn visit_vis(&mut self, vis: &'v ast::Visibility) {
         let span = match *vis {
             ast::Visibility::Crate(span) => span,
-            ast::Visibility::Restricted { ref path, .. } => path.span,
+            ast::Visibility::Restricted { ref path, .. } => {
+                // Check for type parameters
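+                // (a visibility path may only name a module, so generic
+                // arguments of any kind are rejected here)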
+                let found_param = path.segments.iter().any(|segment| {
+                    !segment.parameters.types().is_empty() ||
+                    !segment.parameters.lifetimes().is_empty() ||
+                    !segment.parameters.bindings().is_empty()
+                });
+                if found_param {
+                    self.context.span_handler.span_err(path.span, "type or lifetime parameters \
+                                                                   in visibility path");
+                }
+                path.span
+            }
             _ => return,
         };
         self.gate_feature("pub_restricted", span, "`pub(restricted)` syntax is experimental");
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 89451e795503f..2c325080c0c26 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -22,7 +22,7 @@ use ast::*;
 use ast;
 use attr::{ThinAttributes, ThinAttributesExt};
 use codemap::{respan, Span, Spanned};
-use parse::token;
+use parse::token::{self, keywords};
 use ptr::P;
 use util::small_vector::SmallVector;
 use util::move_map::MoveMap;
@@ -610,17 +610,11 @@ pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
 pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
     match t {
-        token::Ident(id, followed_by_colons) => {
-            token::Ident(fld.fold_ident(id), followed_by_colons)
-        }
+        token::Ident(id) => token::Ident(fld.fold_ident(id)),
         token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
         token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
-        token::SubstNt(ident, namep) => {
-            token::SubstNt(fld.fold_ident(ident), namep)
-        }
-        token::MatchNt(name, kind, namep, kindp) => {
-            token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp)
-        }
+        token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
+        token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
         _ => t
     }
 }
@@ -664,9 +658,8 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
         token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
         token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
         token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
-        token::NtIdent(id, is_mod_name) =>
-            token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), .. *id}),
-                           is_mod_name),
+        token::NtIdent(id) =>
+            token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
         token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
@@ -1022,7 +1015,7 @@ pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, config, mut exported_mac
     let config = folder.fold_meta_items(config);
 
     let mut items = folder.fold_item(P(ast::Item {
-        ident: token::special_idents::invalid,
+        ident: keywords::Invalid.ident(),
         attrs: attrs,
         id: ast::DUMMY_NODE_ID,
         vis: ast::Visibility::Public,
@@ -1241,10 +1234,11 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mu
                 ExprKind::Match(folder.fold_expr(expr),
                           arms.move_map(|x| folder.fold_arm(x)))
             }
-            ExprKind::Closure(capture_clause, decl, body) => {
+            ExprKind::Closure(capture_clause, decl, body, span) => {
                 ExprKind::Closure(capture_clause,
-                            folder.fold_fn_decl(decl),
-                            folder.fold_block(body))
+                                  folder.fold_fn_decl(decl),
+                                  folder.fold_block(body),
+                                  folder.new_span(span))
             }
             ExprKind::Block(blk) => ExprKind::Block(folder.fold_block(blk)),
             ExprKind::Assign(el, er) => {
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index ca7e5729c0b7a..6cfa1e9847b88 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -25,6 +25,7 @@
 #![cfg_attr(not(stage0), deny(warnings))]
 
 #![feature(associated_consts)]
+#![feature(const_fn)]
 #![feature(filling_drop)]
 #![feature(libc)]
 #![feature(rustc_private)]
@@ -96,7 +97,6 @@ pub mod config;
 pub mod entry;
 pub mod feature_gate;
 pub mod fold;
-pub mod owned_slice;
 pub mod parse;
 pub mod ptr;
 pub mod show_span;
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a5cb5c7117e21..2eda13adcb580 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -13,8 +13,7 @@ use codemap::{BytePos, CharPos, CodeMap, Pos, Span};
 use codemap;
 use errors::{FatalError, Handler, DiagnosticBuilder};
 use ext::tt::transcribe::tt_next_token;
-use parse::token::str_to_ident;
-use parse::token;
+use parse::token::{self, keywords, str_to_ident};
 use str::char_at;
 use rustc_unicode::property::Pattern_White_Space;
 
@@ -1039,11 +1038,7 @@ impl<'a> StringReader<'a> {
                     token::Underscore
                 } else {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    if self.curr_is(':') && self.nextch_is(':') {
-                        token::Ident(str_to_ident(string), token::ModName)
-                    } else {
-                        token::Ident(str_to_ident(string), token::Plain)
-                    }
+                    token::Ident(str_to_ident(string))
                 }
             });
         }
@@ -1231,17 +1226,11 @@ impl<'a> StringReader<'a> {
                     let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
                         str_to_ident(lifetime_name)
                     });
-                    let keyword_checking_token = &token::Ident(keyword_checking_ident,
-                                                               token::Plain);
+                    let keyword_checking_token = &token::Ident(keyword_checking_ident);
                     let last_bpos = self.last_pos;
-                    if keyword_checking_token.is_keyword(token::keywords::SelfValue) {
-                        self.err_span_(start,
-                                       last_bpos,
-                                       "invalid lifetime name: 'self is no longer a special \
-                                        lifetime");
-                    } else if keyword_checking_token.is_any_keyword() &&
-                       !keyword_checking_token.is_keyword(token::keywords::Static) {
-                        self.err_span_(start, last_bpos, "invalid lifetime name");
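+                    // Only `'static` may use a keyword as its lifetime name.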
+                    if keyword_checking_token.is_any_keyword() &&
+                       !keyword_checking_token.is_keyword(keywords::Static) {
+                        self.err_span_(start, last_bpos, "lifetimes cannot use keyword names");
                     }
 
                     return token::Lifetime(ident);
@@ -1687,7 +1676,7 @@ mod tests {
         assert_eq!(string_reader.next_token().tok, token::Whitespace);
         let tok1 = string_reader.next_token();
         let tok2 = TokenAndSpan {
-            tok: token::Ident(id, token::Plain),
+            tok: token::Ident(id),
             sp: Span {
                 lo: BytePos(21),
                 hi: BytePos(23),
@@ -1701,7 +1690,7 @@ mod tests {
         // read another token:
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
-            tok: token::Ident(str_to_ident("main"), token::Plain),
+            tok: token::Ident(str_to_ident("main")),
             sp: Span {
                 lo: BytePos(24),
                 hi: BytePos(28),
@@ -1722,8 +1711,8 @@ mod tests {
     }
 
     // make the identifier by looking up the string in the interner
-    fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token {
-        token::Ident(str_to_ident(id), style)
+    fn mk_ident(id: &str) -> token::Token {
+        token::Ident(str_to_ident(id))
     }
 
     #[test]
@@ -1731,9 +1720,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a b".to_string()),
-                           vec![mk_ident("a", token::Plain),
-                                token::Whitespace,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
     }
 
     #[test]
@@ -1741,9 +1728,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a::b".to_string()),
-                           vec![mk_ident("a", token::ModName),
-                                token::ModSep,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
     }
 
     #[test]
@@ -1751,10 +1736,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
-                           vec![mk_ident("a", token::Plain),
-                                token::Whitespace,
-                                token::ModSep,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
     }
 
     #[test]
@@ -1762,10 +1744,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
-                           vec![mk_ident("a", token::ModName),
-                                token::ModSep,
-                                token::Whitespace,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
     }
 
     #[test]
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 29b1d5b9aff06..c2050d2a8f48b 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -734,9 +734,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))),
+                Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
                 Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))),
+                Some(&TokenTree::Token(_, token::Ident(name_zip))),
                 Some(&TokenTree::Delimited(_, ref macro_delimed)),
             )
             if name_macro_rules.name.as_str() == "macro_rules"
@@ -755,7 +755,7 @@ mod tests {
                             (
                                 2,
                                 Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
+                                Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if first_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
@@ -766,7 +766,7 @@ mod tests {
                             (
                                 2,
                                 Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
+                                Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
@@ -785,26 +785,17 @@ mod tests {
         let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
 
         let expected = vec![
-            TokenTree::Token(sp(0, 2),
-                         token::Ident(str_to_ident("fn"),
-                         token::IdentStyle::Plain)),
-            TokenTree::Token(sp(3, 4),
-                         token::Ident(str_to_ident("a"),
-                         token::IdentStyle::Plain)),
+            TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
+            TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
                 Rc::new(ast::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
-                        TokenTree::Token(sp(6, 7),
-                                     token::Ident(str_to_ident("b"),
-                                     token::IdentStyle::Plain)),
-                        TokenTree::Token(sp(8, 9),
-                                     token::Colon),
-                        TokenTree::Token(sp(10, 13),
-                                     token::Ident(str_to_ident("i32"),
-                                     token::IdentStyle::Plain)),
+                        TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(8, 9), token::Colon),
+                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                     ],
                     close_span: sp(13, 14),
                 })),
@@ -814,11 +805,8 @@ mod tests {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
-                        TokenTree::Token(sp(17, 18),
-                                     token::Ident(str_to_ident("b"),
-                                     token::IdentStyle::Plain)),
-                        TokenTree::Token(sp(18, 19),
-                                     token::Semi)
+                        TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
                 }))
@@ -937,7 +925,7 @@ mod tests {
                                     Abi::Rust,
                                     ast::Generics{ // no idea on either of these:
                                         lifetimes: Vec::new(),
-                                        ty_params: P::empty(),
+                                        ty_params: P::new(),
                                         where_clause: ast::WhereClause {
                                             id: ast::DUMMY_NODE_ID,
                                             predicates: Vec::new(),
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index f3d3bbd9f9905..8722fe9d79d4b 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub use self::PathParsingMode::*;
-
 use abi::{self, Abi};
 use ast::BareFnTy;
 use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
@@ -51,7 +49,7 @@ use parse::common::SeqSep;
 use parse::lexer::{Reader, TokenAndSpan};
 use parse::obsolete::{ParserObsoleteMethods, ObsoleteSyntax};
 use parse::token::{self, intern, MatchNt, SubstNt, SpecialVarNt, InternedString};
-use parse::token::{keywords, special_idents, SpecialMacroVar};
+use parse::token::{keywords, SpecialMacroVar};
 use parse::{new_sub_parser_from_file, ParseSess};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
@@ -69,26 +67,24 @@ bitflags! {
         const RESTRICTION_STMT_EXPR         = 1 << 0,
         const RESTRICTION_NO_STRUCT_LITERAL = 1 << 1,
         const NO_NONINLINE_MOD  = 1 << 2,
-        const ALLOW_MODULE_PATHS = 1 << 3,
     }
 }
 
 type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute> >);
 
-/// How to parse a path. There are four different kinds of paths, all of which
+/// How to parse a path. There are three different kinds of paths, all of which
 /// are parsed somewhat differently.
 #[derive(Copy, Clone, PartialEq)]
-pub enum PathParsingMode {
-    /// A path with no type parameters; e.g. `foo::bar::Baz`
-    NoTypesAllowed,
-    /// Same as `NoTypesAllowed`, but may end with `::{` or `::*`, which are left unparsed
-    ImportPrefix,
+pub enum PathStyle {
+    /// A path with no type parameters, e.g. `foo::bar::Baz`, used in imports or visibilities.
+    Mod,
     /// A path with a lifetime and type parameters, with no double colons
-    /// before the type parameters; e.g. `foo::bar<'a>::Baz<T>`
-    LifetimeAndTypesWithoutColons,
+    /// before the type parameters; e.g. `foo::bar<'a>::Baz<T>`, used in types.
+    /// Paths using this style can be passed into macros expecting `path` nonterminals.
+    Type,
     /// A path with a lifetime and type parameters with double colons before
-    /// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>`
-    LifetimeAndTypesWithColons,
+    /// the type parameters; e.g. `foo::bar::<'a>::Baz::<T>`, used in expressions or patterns.
+    Expr,
 }
 
 /// How to parse a bound, whether to allow bound modifiers such as `?`.
@@ -292,13 +288,13 @@ impl TokenType {
         match *self {
             TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)),
             TokenType::Operator => "an operator".to_string(),
-            TokenType::Keyword(kw) => format!("`{}`", kw.to_name()),
+            TokenType::Keyword(kw) => format!("`{}`", kw.name()),
         }
     }
 }
 
-fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
-    t.is_plain_ident() || *t == token::Underscore
+fn is_ident_or_underscore(t: &token::Token) -> bool {
+    t.is_ident() || *t == token::Underscore
 }
 
 /// Information about the path to a module.
@@ -398,6 +394,17 @@ impl<'a> Parser<'a> {
         Parser::token_to_string(&self.token)
     }
 
+    pub fn this_token_descr(&self) -> String {
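+        // Label keywords explicitly so error messages read e.g.
+        // "expected type, found keyword `fn`" rather than just quoting the token.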
+        let s = self.this_token_to_string();
+        if self.token.is_strict_keyword() {
+            format!("keyword `{}`", s)
+        } else if self.token.is_reserved_keyword() {
+            format!("reserved keyword `{}`", s)
+        } else {
+            format!("`{}`", s)
+        }
+    }
+
     pub fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
         let token_str = Parser::token_to_string(t);
         let last_span = self.last_span;
@@ -562,12 +569,10 @@ impl<'a> Parser<'a> {
     }
 
     pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
-        if !self.restrictions.contains(Restrictions::ALLOW_MODULE_PATHS) {
-            self.check_strict_keywords();
-        }
+        self.check_strict_keywords();
         self.check_reserved_keywords();
         match self.token {
-            token::Ident(i, _) => {
+            token::Ident(i) => {
                 self.bump();
                 Ok(i)
             }
@@ -585,12 +590,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub fn parse_ident_or_self_type(&mut self) -> PResult<'a, ast::Ident> {
-        if self.is_self_type_ident() {
-            self.expect_self_type_ident()
-        } else {
-            self.parse_ident()
-        }
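+    /// Parse a single identifier and wrap it into a one-segment path located
+    /// at that identifier's span.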
+    fn parse_ident_into_path(&mut self) -> PResult<'a, ast::Path> {
+        let ident = self.parse_ident()?;
+        Ok(ast::Path::from_ident(self.last_span, ident))
     }
 
     /// Check if the next token is `tok`, and return `true` if so.
@@ -637,9 +639,8 @@ impl<'a> Parser<'a> {
     }
 
     pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool {
-        let tok = token::Ident(ident, token::Plain);
-        self.expected_tokens.push(TokenType::Token(tok));
-        if let token::Ident(ref cur_ident, _) = self.token {
+        self.expected_tokens.push(TokenType::Token(token::Ident(ident)));
+        if let token::Ident(ref cur_ident) = self.token {
             cur_ident.name == ident.name
         } else {
             false
@@ -1159,7 +1160,7 @@ impl<'a> Parser<'a> {
             let other_bounds = if self.eat(&token::BinOp(token::Plus)) {
                 self.parse_ty_param_bounds(BoundParsingMode::Bare)?
             } else {
-                P::empty()
+                P::new()
             };
             let all_bounds =
                 Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter()
@@ -1170,7 +1171,7 @@ impl<'a> Parser<'a> {
     }
 
     pub fn parse_ty_path(&mut self) -> PResult<'a, TyKind> {
-        Ok(TyKind::Path(None, self.parse_path(LifetimeAndTypesWithoutColons)?))
+        Ok(TyKind::Path(None, self.parse_path(PathStyle::Type)?))
     }
 
     /// parse a TyKind::BareFn type:
@@ -1473,13 +1474,11 @@ impl<'a> Parser<'a> {
         } else if self.eat_lt() {
 
             let (qself, path) =
-                 self.parse_qualified_path(NoTypesAllowed)?;
+                 self.parse_qualified_path(PathStyle::Type)?;
 
             TyKind::Path(Some(qself), path)
-        } else if self.check(&token::ModSep) ||
-                  self.token.is_ident() ||
-                  self.token.is_path() {
-            let path = self.parse_path(LifetimeAndTypesWithoutColons)?;
+        } else if self.token.is_path_start() {
+            let path = self.parse_path(PathStyle::Type)?;
             if self.check(&token::Not) {
                 // MACRO INVOCATION
                 self.bump();
@@ -1497,9 +1496,8 @@ impl<'a> Parser<'a> {
             // TYPE TO BE INFERRED
             TyKind::Infer
         } else {
-            let this_token_str = self.this_token_to_string();
-            let msg = format!("expected type, found `{}`", this_token_str);
-            return Err(self.fatal(&msg[..]));
+            let msg = format!("expected type, found {}", self.this_token_descr());
+            return Err(self.fatal(&msg));
         };
 
         let sp = mk_sp(lo, self.last_span.hi);
@@ -1541,10 +1539,10 @@ impl<'a> Parser<'a> {
         debug!("parser is_named_argument offset:{}", offset);
 
         if offset == 0 {
-            is_plain_ident_or_underscore(&self.token)
+            is_ident_or_underscore(&self.token)
                 && self.look_ahead(1, |t| *t == token::Colon)
         } else {
-            self.look_ahead(offset, |t| is_plain_ident_or_underscore(t))
+            self.look_ahead(offset, |t| is_ident_or_underscore(t))
                 && self.look_ahead(offset + 1, |t| *t == token::Colon)
         }
     }
@@ -1564,7 +1562,7 @@ impl<'a> Parser<'a> {
         } else {
             debug!("parse_arg_general ident_to_pat");
             let sp = self.last_span;
-            let spanned = Spanned { span: sp, node: special_idents::invalid };
+            let spanned = Spanned { span: sp, node: keywords::Invalid.ident() };
             P(Pat {
                 id: ast::DUMMY_NODE_ID,
                 node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable),
@@ -1616,12 +1614,12 @@ impl<'a> Parser<'a> {
     }
 
     /// Matches token_lit = LIT_INTEGER | ...
-    pub fn lit_from_token(&self, tok: &token::Token) -> PResult<'a, LitKind> {
-        match *tok {
+    pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
+        let out = match self.token {
             token::Interpolated(token::NtExpr(ref v)) => {
                 match v.node {
-                    ExprKind::Lit(ref lit) => { Ok(lit.node.clone()) }
-                    _ => { return self.unexpected_last(tok); }
+                    ExprKind::Lit(ref lit) => { lit.node.clone() }
+                    _ => { return self.unexpected_last(&self.token); }
                 }
             }
             token::Literal(lit, suf) => {
@@ -1636,13 +1634,13 @@ impl<'a> Parser<'a> {
                         (false, parse::integer_lit(&s.as_str(),
                                                    suf.as_ref().map(|s| s.as_str()),
                                                    &self.sess.span_diagnostic,
-                                                   self.last_span))
+                                                   self.span))
                     }
                     token::Float(s) => {
                         (false, parse::float_lit(&s.as_str(),
                                                  suf.as_ref().map(|s| s.as_str()),
                                                   &self.sess.span_diagnostic,
-                                                 self.last_span))
+                                                 self.span))
                     }
 
                     token::Str_(s) => {
@@ -1664,14 +1662,17 @@ impl<'a> Parser<'a> {
                 };
 
                 if suffix_illegal {
-                    let sp = self.last_span;
+                    let sp = self.span;
                     self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf)
                 }
 
-                Ok(out)
+                out
             }
-            _ => { return self.unexpected_last(tok); }
-        }
+            _ => { return self.unexpected_last(&self.token); }
+        };
+
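+        // Only advance past the token once it has been recognized as a literal.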
+        self.bump();
+        Ok(out)
     }
 
     /// Matches lit = true | false | token_lit
@@ -1682,8 +1683,7 @@ impl<'a> Parser<'a> {
         } else if self.eat_keyword(keywords::False) {
             LitKind::Bool(false)
         } else {
-            let token = self.bump_and_get();
-            let lit = self.lit_from_token(&token)?;
+            let lit = self.parse_lit_token()?;
             lit
         };
         Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) })
@@ -1707,6 +1707,16 @@ impl<'a> Parser<'a> {
         }
     }
 
+    pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
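+        // Keywords that may appear as path segments (e.g. `self` and `Self`)
+        // are accepted here, while `parse_ident` would otherwise reject them.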
+        match self.token {
+            token::Ident(sid) if self.token.is_path_segment_keyword() => {
+                self.bump();
+                Ok(sid)
+            }
+            _ => self.parse_ident(),
+        }
+    }
+
     /// Parses qualified path.
     ///
     /// Assumes that the leading `<` has been parsed already.
@@ -1722,12 +1732,12 @@ impl<'a> Parser<'a> {
     ///
     /// `<T as U>::a`
     /// `<T as U>::F::a::<S>`
-    pub fn parse_qualified_path(&mut self, mode: PathParsingMode)
+    pub fn parse_qualified_path(&mut self, mode: PathStyle)
                                 -> PResult<'a, (QSelf, ast::Path)> {
         let span = self.last_span;
         let self_type = self.parse_ty_sum()?;
         let mut path = if self.eat_keyword(keywords::As) {
-            self.parse_path(LifetimeAndTypesWithoutColons)?
+            self.parse_path(PathStyle::Type)?
         } else {
             ast::Path {
                 span: span,
@@ -1745,14 +1755,14 @@ impl<'a> Parser<'a> {
         self.expect(&token::ModSep)?;
 
         let segments = match mode {
-            LifetimeAndTypesWithoutColons => {
+            PathStyle::Type => {
                 self.parse_path_segments_without_colons()?
             }
-            LifetimeAndTypesWithColons => {
+            PathStyle::Expr => {
                 self.parse_path_segments_with_colons()?
             }
-            NoTypesAllowed | ImportPrefix => {
-                self.parse_path_segments_without_types(mode == ImportPrefix)?
+            PathStyle::Mod => {
+                self.parse_path_segments_without_types()?
             }
         };
         path.segments.extend(segments);
@@ -1766,7 +1776,7 @@ impl<'a> Parser<'a> {
     /// mode. The `mode` parameter determines whether lifetimes, types, and/or
     /// bounds are permitted and whether `::` must precede type parameter
     /// groups.
-    pub fn parse_path(&mut self, mode: PathParsingMode) -> PResult<'a, ast::Path> {
+    pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> {
         // Check for a whole path...
         let found = match self.token {
             token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()),
@@ -1783,14 +1793,14 @@ impl<'a> Parser<'a> {
         // identifier followed by an optional lifetime and a set of types.
         // A bound set is a set of type parameter bounds.
         let segments = match mode {
-            LifetimeAndTypesWithoutColons => {
+            PathStyle::Type => {
                 self.parse_path_segments_without_colons()?
             }
-            LifetimeAndTypesWithColons => {
+            PathStyle::Expr => {
                 self.parse_path_segments_with_colons()?
             }
-            NoTypesAllowed | ImportPrefix => {
-                self.parse_path_segments_without_types(mode == ImportPrefix)?
+            PathStyle::Mod => {
+                self.parse_path_segments_without_types()?
             }
         };
 
@@ -1813,7 +1823,7 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let identifier = self.parse_ident_or_self_type()?;
+            let identifier = self.parse_path_segment_ident()?;
 
             // Parse types, optionally.
             let parameters = if self.eat_lt() {
@@ -1866,7 +1876,7 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let identifier = self.parse_ident_or_self_type()?;
+            let identifier = self.parse_path_segment_ident()?;
 
             // If we do not see a `::`, stop.
             if !self.eat(&token::ModSep) {
@@ -1905,15 +1915,14 @@ impl<'a> Parser<'a> {
         }
     }
 
-
     /// Examples:
     /// - `a::b::c`
-    pub fn parse_path_segments_without_types(&mut self, import_prefix: bool)
+    pub fn parse_path_segments_without_types(&mut self)
                                              -> PResult<'a, Vec<ast::PathSegment>> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let identifier = self.parse_ident_or_self_type()?;
+            let identifier = self.parse_path_segment_ident()?;
 
             // Assemble and push the result.
             segments.push(ast::PathSegment {
@@ -1922,7 +1931,7 @@ impl<'a> Parser<'a> {
             });
 
             // If we do not see a `::` or see `::{`/`::*`, stop.
-            if !self.check(&token::ModSep) || import_prefix && self.is_import_coupler() {
+            if !self.check(&token::ModSep) || self.is_import_coupler() {
                 return Ok(segments);
             } else {
                 self.bump();
@@ -2212,15 +2221,6 @@ impl<'a> Parser<'a> {
                 let lo = self.span.lo;
                 return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs);
             },
-            token::Ident(id @ ast::Ident {
-                            name: token::SELF_KEYWORD_NAME,
-                            ctxt: _
-                         }, token::Plain) => {
-                self.bump();
-                let path = ast::Path::from_ident(mk_sp(lo, hi), id);
-                ex = ExprKind::Path(None, path);
-                hi = self.last_span.hi;
-            }
             token::OpenDelim(token::Bracket) => {
                 self.bump();
 
@@ -2263,7 +2263,7 @@ impl<'a> Parser<'a> {
             _ => {
                 if self.eat_lt() {
                     let (qself, path) =
-                        self.parse_qualified_path(LifetimeAndTypesWithColons)?;
+                        self.parse_qualified_path(PathStyle::Expr)?;
                     hi = path.span.hi;
                     return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs));
                 }
@@ -2350,12 +2350,8 @@ impl<'a> Parser<'a> {
                     let mut db = self.fatal("expected expression, found statement (`let`)");
                     db.note("variable declaration using `let` is a statement");
                     return Err(db);
-                } else if self.check(&token::ModSep) ||
-                        self.token.is_ident() &&
-                        !self.check_keyword(keywords::True) &&
-                        !self.check_keyword(keywords::False) {
-                    let pth =
-                        self.parse_path(LifetimeAndTypesWithColons)?;
+                } else if self.token.is_path_start() {
+                    let pth = self.parse_path(PathStyle::Expr)?;
 
                     // `!`, as an operator, is prefix, so we know this isn't that
                     if self.check(&token::Not) {
@@ -2435,10 +2431,18 @@ impl<'a> Parser<'a> {
                     hi = pth.span.hi;
                     ex = ExprKind::Path(None, pth);
                 } else {
-                    // other literal expression
-                    let lit = self.parse_lit()?;
-                    hi = lit.span.hi;
-                    ex = ExprKind::Lit(P(lit));
+                    match self.parse_lit() {
+                        Ok(lit) => {
+                            hi = lit.span.hi;
+                            ex = ExprKind::Lit(P(lit));
+                        }
+                        Err(mut err) => {
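+                            // Cancel the literal-specific error and report a
+                            // uniform "expected expression" message instead.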
+                            err.cancel();
+                            let msg = format!("expected expression, found {}",
+                                              self.this_token_descr());
+                            return Err(self.fatal(&msg));
+                        }
+                    }
                 }
             }
         }
@@ -2577,7 +2581,7 @@ impl<'a> Parser<'a> {
             // expr.f
             if self.eat(&token::Dot) {
                 match self.token {
-                  token::Ident(i, _) => {
+                  token::Ident(i) => {
                     let dot_pos = self.last_span.hi;
                     hi = self.span.hi;
                     self.bump();
@@ -2632,7 +2636,7 @@ impl<'a> Parser<'a> {
                     self.span_err(self.span, &format!("unexpected token: `{}`", actual));
 
                     let dot_pos = self.last_span.hi;
-                    e = self.parse_dot_suffix(special_idents::invalid,
+                    e = self.parse_dot_suffix(keywords::Invalid.ident(),
                                               mk_sp(dot_pos, dot_pos),
                                               e, lo)?;
                   }
@@ -2674,7 +2678,7 @@ impl<'a> Parser<'a> {
     // Parse unquoted tokens after a `$` in a token tree
     fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
         let mut sp = self.span;
-        let (name, namep) = match self.token {
+        let name = match self.token {
             token::Dollar => {
                 self.bump();
 
@@ -2694,40 +2698,36 @@ impl<'a> Parser<'a> {
                                           op: repeat,
                                           num_captures: name_num
                                       })));
-                } else if self.token.is_keyword_allow_following_colon(keywords::Crate) {
+                } else if self.token.is_keyword(keywords::Crate) {
                     self.bump();
                     return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
                 } else {
                     sp = mk_sp(sp.lo, self.span.hi);
-                    let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain };
-                    let name = self.parse_ident()?;
-                    (name, namep)
+                    self.parse_ident()?
                 }
             }
-            token::SubstNt(name, namep) => {
+            token::SubstNt(name) => {
                 self.bump();
-                (name, namep)
+                name
             }
             _ => unreachable!()
         };
         // continue by trying to parse the `:ident` after `$name`
-        if self.token == token::Colon && self.look_ahead(1, |t| t.is_ident() &&
-                                                                !t.is_strict_keyword() &&
-                                                                !t.is_reserved_keyword()) {
+        if self.token == token::Colon &&
+                self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) {
             self.bump();
             sp = mk_sp(sp.lo, self.span.hi);
-            let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
             let nt_kind = self.parse_ident()?;
-            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp)))
+            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
         } else {
-            Ok(TokenTree::Token(sp, SubstNt(name, namep)))
+            Ok(TokenTree::Token(sp, SubstNt(name)))
         }
     }
 
     pub fn check_unknown_macro_variable(&mut self) {
         if self.quote_depth == 0 {
             match self.token {
-                token::SubstNt(name, _) =>
+                token::SubstNt(name) =>
                     self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
                 _ => {}
             }
@@ -3225,13 +3225,15 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(lo, hi, ExprKind::IfLet(pat, expr, thn, els), attrs))
     }
 
-    // `|args| expr`
-    pub fn parse_lambda_expr(&mut self, lo: BytePos,
+    // `move |args| expr`
+    pub fn parse_lambda_expr(&mut self,
+                             lo: BytePos,
                              capture_clause: CaptureBy,
                              attrs: ThinAttributes)
                              -> PResult<'a, P<Expr>>
     {
         let decl = self.parse_fn_block_decl()?;
+        let decl_hi = self.last_span.hi;
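+        // `lo..decl_hi` spans just the closure header (capture clause, argument
+        // list, and any return type); it is stored in `ExprKind::Closure` below.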
         let body = match decl.output {
             FunctionRetTy::Default(_) => {
                 // If no explicit return type is given, parse any
@@ -3255,7 +3257,8 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr(
             lo,
             body.span.hi,
-            ExprKind::Closure(capture_clause, decl, body), attrs))
+            ExprKind::Closure(capture_clause, decl, body, mk_sp(lo, decl_hi)),
+            attrs))
     }
 
     // `else` token already eaten
@@ -3587,16 +3590,16 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
-        if self.is_path_start() {
+        if self.token.is_path_start() {
             let lo = self.span.lo;
             let (qself, path) = if self.eat_lt() {
                 // Parse a qualified path
                 let (qself, path) =
-                    self.parse_qualified_path(NoTypesAllowed)?;
+                    self.parse_qualified_path(PathStyle::Expr)?;
                 (Some(qself), path)
             } else {
                 // Parse an unqualified path
-                (None, self.parse_path(LifetimeAndTypesWithColons)?)
+                (None, self.parse_path(PathStyle::Expr)?)
             };
             let hi = self.last_span.hi;
             Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), None))
@@ -3605,12 +3608,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn is_path_start(&self) -> bool {
-        (self.token == token::Lt || self.token == token::ModSep
-            || self.token.is_ident() || self.token.is_path())
-            && !self.token.is_keyword(keywords::True) && !self.token.is_keyword(keywords::False)
-    }
-
     /// Parse a pattern.
     pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> {
         maybe_whole!(self, NtPat);
@@ -3661,19 +3658,16 @@ impl<'a> Parser<'a> {
                 // Parse box pat
                 let subpat = self.parse_pat()?;
                 pat = PatKind::Box(subpat);
-            } else if self.is_path_start() {
+            } else if self.token.is_path_start() {
                 // Parse pattern starting with a path
-                if self.token.is_plain_ident() && self.look_ahead(1, |t| *t != token::DotDotDot &&
+                if self.token.is_ident() && self.look_ahead(1, |t| *t != token::DotDotDot &&
                         *t != token::OpenDelim(token::Brace) &&
                         *t != token::OpenDelim(token::Paren) &&
-                        // Contrary to its definition, a plain ident can be followed by :: in macros
                         *t != token::ModSep) {
                     // Plain idents have some extra abilities here compared to general paths
                     if self.look_ahead(1, |t| *t == token::Not) {
                         // Parse macro invocation
-                        let ident = self.parse_ident()?;
-                        let ident_span = self.last_span;
-                        let path = ast::Path::from_ident(ident_span, ident);
+                        let path = self.parse_ident_into_path()?;
                         self.bump();
                         let delim = self.expect_open_delim()?;
                         let tts = self.parse_seq_to_end(
@@ -3693,11 +3687,11 @@ impl<'a> Parser<'a> {
                     let (qself, path) = if self.eat_lt() {
                         // Parse a qualified path
                         let (qself, path) =
-                            self.parse_qualified_path(NoTypesAllowed)?;
+                            self.parse_qualified_path(PathStyle::Expr)?;
                         (Some(qself), path)
                     } else {
                         // Parse an unqualified path
-                        (None, self.parse_path(LifetimeAndTypesWithColons)?)
+                        (None, self.parse_path(PathStyle::Expr)?)
                     };
                     match self.token {
                       token::DotDotDot => {
@@ -3754,12 +3748,20 @@ impl<'a> Parser<'a> {
                 }
             } else {
                 // Try to parse everything else as literal with optional minus
-                let begin = self.parse_pat_literal_maybe_minus()?;
-                if self.eat(&token::DotDotDot) {
-                    let end = self.parse_pat_range_end()?;
-                    pat = PatKind::Range(begin, end);
-                } else {
-                    pat = PatKind::Lit(begin);
+                match self.parse_pat_literal_maybe_minus() {
+                    Ok(begin) => {
+                        if self.eat(&token::DotDotDot) {
+                            let end = self.parse_pat_range_end()?;
+                            pat = PatKind::Range(begin, end);
+                        } else {
+                            pat = PatKind::Lit(begin);
+                        }
+                    }
+                    Err(mut err) => {
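+                        // As with expressions, fall back to a uniform
+                        // "expected pattern" error.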
+                        err.cancel();
+                        let msg = format!("expected pattern, found {}", self.this_token_descr());
+                        return Err(self.fatal(&msg));
+                    }
                 }
             }
           }
@@ -3956,11 +3958,11 @@ impl<'a> Parser<'a> {
 
             // Potential trouble: if we allow macros with paths instead of
             // idents, we'd need to look ahead past the whole path here...
-            let pth = self.parse_path(NoTypesAllowed)?;
+            let pth = self.parse_ident_into_path()?;
             self.bump();
 
             let id = match self.token {
-                token::OpenDelim(_) => token::special_idents::invalid, // no special identifier
+                token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
                 _ => self.parse_ident()?,
             };
 
@@ -3972,7 +3974,7 @@ impl<'a> Parser<'a> {
                 _ => {
                     // we only expect an ident if we didn't parse one
                     // above.
-                    let ident_str = if id.name == token::special_idents::invalid.name {
+                    let ident_str = if id.name == keywords::Invalid.name() {
                         "identifier, "
                     } else {
                         ""
@@ -3998,7 +4000,7 @@ impl<'a> Parser<'a> {
                 MacStmtStyle::NoBraces
             };
 
-            if id.name == token::special_idents::invalid.name {
+            if id.name == keywords::Invalid.name() {
                 let mac = P(spanned(lo, hi, Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT }));
                 let stmt = StmtKind::Mac(mac, style, attrs.into_thin_attrs());
                 spanned(lo, hi, stmt)
@@ -4240,7 +4242,7 @@ impl<'a> Parser<'a> {
                                         -> PResult<'a, TyParamBounds>
     {
         if !self.eat(&token::Colon) {
-            Ok(P::empty())
+            Ok(P::new())
         } else {
             self.parse_ty_param_bounds(mode)
         }
@@ -4626,17 +4628,12 @@ impl<'a> Parser<'a> {
         }))
     }
 
-    fn is_self_ident(&mut self) -> bool {
-        match self.token {
-          token::Ident(id, token::Plain) => id.name == special_idents::self_.name,
-          _ => false
-        }
-    }
-
     fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> {
         match self.token {
-            token::Ident(id, token::Plain) if id.name == special_idents::self_.name => {
+            token::Ident(id) if id.name == keywords::SelfValue.name() => {
                 self.bump();
+                // The hygiene context of `id` needs to be preserved here,
+                // so we can't just return `SelfValue.ident()`.
                 Ok(id)
             },
             _ => {
@@ -4647,27 +4644,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn is_self_type_ident(&mut self) -> bool {
-        match self.token {
-          token::Ident(id, token::Plain) => id.name == special_idents::type_self.name,
-          _ => false
-        }
-    }
-
-    fn expect_self_type_ident(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token {
-            token::Ident(id, token::Plain) if id.name == special_idents::type_self.name => {
-                self.bump();
-                Ok(id)
-            },
-            _ => {
-                let token_str = self.this_token_to_string();
-                Err(self.fatal(&format!("expected `Self`, found `{}`",
-                                   token_str)))
-            }
-        }
-    }
-
     /// Parse the argument list and result type of a function
     /// that may have a self type.
     fn parse_fn_decl_with_self<F>(&mut self,
@@ -4736,16 +4712,16 @@ impl<'a> Parser<'a> {
                 } else {
                     Mutability::Immutable
                 };
-                if self.is_self_ident() {
+                if self.token.is_keyword(keywords::SelfValue) {
                     let span = self.span;
                     self.span_err(span, "cannot pass self by raw pointer");
                     self.bump();
                 }
                 // error case, making bogus self ident:
-                SelfKind::Value(special_idents::self_)
+                SelfKind::Value(keywords::SelfValue.ident())
             }
             token::Ident(..) => {
-                if self.is_self_ident() {
+                if self.token.is_keyword(keywords::SelfValue) {
                     let self_ident = self.expect_self_ident()?;
 
                     // Determine whether this is the fully explicit form, `self:
@@ -4969,7 +4945,7 @@ impl<'a> Parser<'a> {
             Visibility::Inherited => (),
             _ => {
                 let is_macro_rules: bool = match self.token {
-                    token::Ident(sid, _) => sid.name == intern("macro_rules"),
+                    token::Ident(sid) => sid.name == intern("macro_rules"),
                     _ => false,
                 };
                 if is_macro_rules {
@@ -5002,7 +4978,7 @@ impl<'a> Parser<'a> {
             self.complain_if_pub_macro(&vis, last_span);
 
             let lo = self.span.lo;
-            let pth = self.parse_path(NoTypesAllowed)?;
+            let pth = self.parse_ident_into_path()?;
             self.expect(&token::Not)?;
 
             // eat a matched-delimiter token tree:
@@ -5017,7 +4993,7 @@ impl<'a> Parser<'a> {
             if delim != token::Brace {
                 self.expect(&token::Semi)?
             }
-            Ok((token::special_idents::invalid, vec![], ast::ImplItemKind::Macro(m)))
+            Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(m)))
         } else {
             let (constness, unsafety, abi) = self.parse_fn_front_matter()?;
             let ident = self.parse_ident()?;
@@ -5112,7 +5088,7 @@ impl<'a> Parser<'a> {
 
             self.expect(&token::OpenDelim(token::Brace))?;
             self.expect(&token::CloseDelim(token::Brace))?;
-            Ok((special_idents::invalid,
+            Ok((keywords::Invalid.ident(),
              ItemKind::DefaultImpl(unsafety, opt_trait.unwrap()), None))
         } else {
             if opt_trait.is_some() {
@@ -5128,7 +5104,7 @@ impl<'a> Parser<'a> {
                 impl_items.push(self.parse_impl_item()?);
             }
 
-            Ok((special_idents::invalid,
+            Ok((keywords::Invalid.ident(),
              ItemKind::Impl(unsafety, polarity, generics, opt_trait, ty, impl_items),
              Some(attrs)))
         }
@@ -5137,7 +5113,7 @@ impl<'a> Parser<'a> {
     /// Parse a::B<String,i32>
     fn parse_trait_ref(&mut self) -> PResult<'a, TraitRef> {
         Ok(ast::TraitRef {
-            path: self.parse_path(LifetimeAndTypesWithoutColons)?,
+            path: self.parse_path(PathStyle::Type)?,
             ref_id: ast::DUMMY_NODE_ID,
         })
     }
@@ -5297,8 +5273,7 @@ impl<'a> Parser<'a> {
             self.expect(&token::CloseDelim(token::Paren))?;
             Ok(Visibility::Crate(span))
         } else {
-            let path = self.with_res(Restrictions::ALLOW_MODULE_PATHS,
-                                     |this| this.parse_path(NoTypesAllowed))?;
+            let path = self.parse_path(PathStyle::Mod)?;
             self.expect(&token::CloseDelim(token::Paren))?;
             Ok(Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID })
         }
@@ -5306,7 +5281,7 @@ impl<'a> Parser<'a> {
 
     /// Parse defaultness: DEFAULT or nothing
     fn parse_defaultness(&mut self) -> PResult<'a, Defaultness> {
-        if self.eat_contextual_keyword(special_idents::DEFAULT) {
+        if self.eat_contextual_keyword(keywords::Default.ident()) {
             Ok(Defaultness::Default)
         } else {
             Ok(Defaultness::Final)
@@ -5634,7 +5609,7 @@ impl<'a> Parser<'a> {
         };
         Ok(self.mk_item(lo,
                      last_span.hi,
-                     special_idents::invalid,
+                     keywords::Invalid.ident(),
                      ItemKind::ForeignMod(m),
                      visibility,
                      attrs))
@@ -5773,7 +5748,7 @@ impl<'a> Parser<'a> {
             let last_span = self.last_span;
             let item = self.mk_item(lo,
                                     last_span.hi,
-                                    token::special_idents::invalid,
+                                    keywords::Invalid.ident(),
                                     item_,
                                     visibility,
                                     attrs);
@@ -6044,7 +6019,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, Option<P<Item>>> {
         if macros_allowed && !self.token.is_any_keyword()
                 && self.look_ahead(1, |t| *t == token::Not)
-                && (self.look_ahead(2, |t| t.is_plain_ident())
+                && (self.look_ahead(2, |t| t.is_ident())
                     || self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
                     || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
             // MACRO INVOCATION ITEM
@@ -6055,16 +6030,16 @@ impl<'a> Parser<'a> {
             let mac_lo = self.span.lo;
 
             // item macro.
-            let pth = self.parse_path(NoTypesAllowed)?;
+            let pth = self.parse_ident_into_path()?;
             self.expect(&token::Not)?;
 
             // a 'special' identifier (like what `macro_rules!` uses)
             // is optional. We should eventually unify invoc syntax
             // and remove this.
-            let id = if self.token.is_plain_ident() {
+            let id = if self.token.is_ident() {
                 self.parse_ident()?
             } else {
-                token::special_idents::invalid // no special identifier
+                keywords::Invalid.ident() // no special identifier
             };
             // eat a matched-delimiter token tree:
             let delim = self.expect_open_delim()?;
@@ -6161,7 +6136,7 @@ impl<'a> Parser<'a> {
             let items = self.parse_path_list_items()?;
             Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items))))
         } else {
-            let prefix = self.parse_path(ImportPrefix)?;
+            let prefix = self.parse_path(PathStyle::Mod)?;
             if self.is_import_coupler() {
                 // `foo::bar::{a, b}` or `foo::bar::*`
                 self.bump();
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 16417ac004461..fcb6c3539db59 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -11,7 +11,6 @@
 pub use self::BinOpToken::*;
 pub use self::Nonterminal::*;
 pub use self::DelimToken::*;
-pub use self::IdentStyle::*;
 pub use self::Lit::*;
 pub use self::Token::*;
 
@@ -26,7 +25,6 @@ use std::fmt;
 use std::ops::Deref;
 use std::rc::Rc;
 
-#[allow(non_camel_case_types)]
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum BinOpToken {
     Plus,
@@ -52,13 +50,6 @@ pub enum DelimToken {
     Brace,
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
-pub enum IdentStyle {
-    /// `::` follows the identifier with no whitespace in-between.
-    ModName,
-    Plain,
-}
-
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum SpecialMacroVar {
     /// `$crate` will be filled in with the name of the crate a macro was
@@ -99,7 +90,6 @@ impl Lit {
     }
 }
 
-#[allow(non_camel_case_types)]
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)]
 pub enum Token {
     /* Expression-operator symbols. */
@@ -141,7 +131,7 @@ pub enum Token {
     Literal(Lit, Option<ast::Name>),
 
     /* Name components */
-    Ident(ast::Ident, IdentStyle),
+    Ident(ast::Ident),
     Underscore,
     Lifetime(ast::Ident),
 
@@ -151,11 +141,11 @@ pub enum Token {
     /// Doc comment
     DocComment(ast::Name),
     // In left-hand-sides of MBE macros:
-    /// Parse a nonterminal (name to bind, name of NT, styles of their idents)
-    MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle),
+    /// Parse a nonterminal (name to bind, name of NT)
+    MatchNt(ast::Ident, ast::Ident),
     // In right-hand-sides of MBE macros:
     /// A syntactic variable that will be filled in by macro expansion.
-    SubstNt(ast::Ident, IdentStyle),
+    SubstNt(ast::Ident),
     /// A macro variable with special meaning.
     SpecialVarNt(SpecialMacroVar),
 
@@ -185,7 +175,7 @@ impl Token {
     pub fn can_begin_expr(&self) -> bool {
         match *self {
             OpenDelim(_)                => true,
-            Ident(_, _)                 => true,
+            Ident(..)                   => true,
             Underscore                  => true,
             Tilde                       => true,
             Literal(_, _)               => true,
@@ -218,7 +208,7 @@ impl Token {
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
         match *self {
-            Ident(_, _) => true,
+            Ident(..)   => true,
             _           => false,
         }
     }
@@ -239,16 +229,6 @@ impl Token {
         }
     }
 
-    /// Returns `true` if the token is a path that is not followed by a `::`
-    /// token.
-    #[allow(non_upper_case_globals)]
-    pub fn is_plain_ident(&self) -> bool {
-        match *self {
-            Ident(_, Plain) => true,
-            _               => false,
-        }
-    }
-
     /// Returns `true` if the token is a lifetime.
     pub fn is_lifetime(&self) -> bool {
         match *self {
@@ -263,6 +243,11 @@ impl Token {
         self.is_keyword(keywords::Const)
     }
 
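+    /// Returns `true` if the token can appear at the start of a path.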
+    pub fn is_path_start(&self) -> bool {
+        self == &ModSep || self == &Lt || self.is_path() ||
+        self.is_path_segment_keyword() || self.is_ident() && !self.is_any_keyword()
+    }
+
     /// Maps a token to its corresponding binary operator.
     pub fn to_binop(&self) -> Option<BinOpKind> {
         match *self {
@@ -289,77 +274,41 @@ impl Token {
     }
 
     /// Returns `true` if the token is a given keyword, `kw`.
-    #[allow(non_upper_case_globals)]
     pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
         match *self {
-            Ident(sid, Plain) => kw.to_name() == sid.name,
-            _                      => false,
+            Ident(id) => id.name == kw.name(),
+            _ => false,
         }
     }
 
-    pub fn is_keyword_allow_following_colon(&self, kw: keywords::Keyword) -> bool {
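+    /// Returns `true` if the token is a path segment keyword (`super`, `self`, or `Self`).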
+    pub fn is_path_segment_keyword(&self) -> bool {
         match *self {
-            Ident(sid, _) => { kw.to_name() == sid.name }
-            _ => { false }
+            Ident(id) => id.name == keywords::Super.name() ||
+                         id.name == keywords::SelfValue.name() ||
+                         id.name == keywords::SelfType.name(),
+            _ => false,
         }
     }
 
-    /// Returns `true` if the token is either a special identifier, or a strict
-    /// or reserved keyword.
-    #[allow(non_upper_case_globals)]
+    /// Returns `true` if the token is either a strict or reserved keyword.
     pub fn is_any_keyword(&self) -> bool {
-        match *self {
-            Ident(sid, Plain) => {
-                let n = sid.name;
-
-                   n == SELF_KEYWORD_NAME
-                || n == STATIC_KEYWORD_NAME
-                || n == SUPER_KEYWORD_NAME
-                || n == SELF_TYPE_KEYWORD_NAME
-                || STRICT_KEYWORD_START <= n
-                && n <= RESERVED_KEYWORD_FINAL
-            },
-            _ => false
-        }
+        self.is_strict_keyword() || self.is_reserved_keyword()
     }
 
-    /// Returns `true` if the token may not appear as an identifier.
-    #[allow(non_upper_case_globals)]
+    /// Returns `true` if the token is a strict keyword.
     pub fn is_strict_keyword(&self) -> bool {
         match *self {
-            Ident(sid, Plain) => {
-                let n = sid.name;
-
-                   n == SELF_KEYWORD_NAME
-                || n == STATIC_KEYWORD_NAME
-                || n == SUPER_KEYWORD_NAME
-                || n == SELF_TYPE_KEYWORD_NAME
-                || STRICT_KEYWORD_START <= n
-                && n <= STRICT_KEYWORD_FINAL
-            },
-            Ident(sid, ModName) => {
-                let n = sid.name;
-
-                   n != SELF_KEYWORD_NAME
-                && n != SUPER_KEYWORD_NAME
-                && STRICT_KEYWORD_START <= n
-                && n <= STRICT_KEYWORD_FINAL
-            }
+            Ident(id) => id.name >= keywords::As.name() &&
+                         id.name <= keywords::While.name(),
             _ => false,
         }
     }
 
-    /// Returns `true` if the token is a keyword that has been reserved for
-    /// possible future use.
-    #[allow(non_upper_case_globals)]
+    /// Returns `true` if the token is a keyword reserved for possible future use.
     pub fn is_reserved_keyword(&self) -> bool {
         match *self {
-            Ident(sid, Plain) => {
-                let n = sid.name;
-
-                   RESERVED_KEYWORD_START <= n
-                && n <= RESERVED_KEYWORD_FINAL
-            },
+            Ident(id) => id.name >= keywords::Abstract.name() &&
+                         id.name <= keywords::Yield.name(),
             _ => false,
         }
     }
@@ -369,7 +318,7 @@ impl Token {
     /// See `syntax::ext::mtwt`.
     pub fn mtwt_eq(&self, other : &Token) -> bool {
         match (self, other) {
-            (&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) =>
+            (&Ident(id1), &Ident(id2)) | (&Lifetime(id1), &Lifetime(id2)) =>
                 mtwt::resolve(id1) == mtwt::resolve(id2),
             _ => *self == *other
         }
@@ -385,7 +334,7 @@ pub enum Nonterminal {
     NtPat(P<ast::Pat>),
     NtExpr(P<ast::Expr>),
     NtTy(P<ast::Ty>),
-    NtIdent(Box<ast::SpannedIdent>, IdentStyle),
+    NtIdent(Box<ast::SpannedIdent>),
     /// Stuff inside brackets for attributes
     NtMeta(P<ast::MetaItem>),
     NtPath(Box<ast::Path>),
@@ -422,191 +371,104 @@ impl fmt::Debug for Nonterminal {
     }
 }
 
-
-// Get the first "argument"
-macro_rules! first {
-    ( $first:expr, $( $remainder:expr, )* ) => ( $first )
-}
-
-// Get the last "argument" (has to be done recursively to avoid phoney local ambiguity error)
-macro_rules! last {
-    ( $first:expr, $( $remainder:expr, )+ ) => ( last!( $( $remainder, )+ ) );
-    ( $first:expr, ) => ( $first )
-}
-
 // In this macro, there is the requirement that the name (the number) must be monotonically
 // increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
-// except starting from the next number instead of zero, and with the additional exception that
-// special identifiers are *also* allowed (they are deduplicated in the important place, the
-// interner), an exception which is demonstrated by "static" and "self".
-macro_rules! declare_special_idents_and_keywords {(
-    // So now, in these rules, why is each definition parenthesised?
-    // Answer: otherwise we get a spurious local ambiguity bug on the "}"
-    pub mod special_idents {
-        $( ($si_name:expr, $si_static:ident, $si_str:expr); )*
-    }
-
-    pub mod keywords {
-        'strict:
-        $( ($sk_name:expr, $sk_variant:ident, $sk_str:expr); )*
-        'reserved:
-        $( ($rk_name:expr, $rk_variant:ident, $rk_str:expr); )*
-    }
+// except starting from the next number instead of zero.
+macro_rules! declare_keywords {(
+    $( ($index: expr, $konst: ident, $string: expr) )*
 ) => {
-    const STRICT_KEYWORD_START: ast::Name = first!($( ast::Name($sk_name), )*);
-    const STRICT_KEYWORD_FINAL: ast::Name = last!($( ast::Name($sk_name), )*);
-    const RESERVED_KEYWORD_START: ast::Name = first!($( ast::Name($rk_name), )*);
-    const RESERVED_KEYWORD_FINAL: ast::Name = last!($( ast::Name($rk_name), )*);
-
-    pub mod special_idents {
-        use ast;
-        $(
-            #[allow(non_upper_case_globals)]
-            pub const $si_static: ast::Ident = ast::Ident {
-                name: ast::Name($si_name),
-                ctxt: ast::EMPTY_CTXT,
-            };
-         )*
-    }
-
-    pub mod special_names {
-        use ast;
-        $(
-            #[allow(non_upper_case_globals)]
-            pub const $si_static: ast::Name = ast::Name($si_name);
-        )*
-    }
-
-    /// All the valid words that have meaning in the Rust language.
-    ///
-    /// Rust keywords are either 'strict' or 'reserved'.  Strict keywords may not
-    /// appear as identifiers at all. Reserved keywords are not used anywhere in
-    /// the language and may not appear as identifiers.
     pub mod keywords {
-        pub use self::Keyword::*;
         use ast;
-
-        #[derive(Copy, Clone, PartialEq, Eq)]
-        pub enum Keyword {
-            $( $sk_variant, )*
-            $( $rk_variant, )*
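+        /// A language keyword, represented by its pre-interned identifier.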
+        #[derive(Clone, Copy, PartialEq, Eq)]
+        pub struct Keyword {
+            ident: ast::Ident,
         }
-
         impl Keyword {
-            pub fn to_name(&self) -> ast::Name {
-                match *self {
-                    $( $sk_variant => ast::Name($sk_name), )*
-                    $( $rk_variant => ast::Name($rk_name), )*
-                }
-            }
+            #[inline] pub fn ident(self) -> ast::Ident { self.ident }
+            #[inline] pub fn name(self) -> ast::Name { self.ident.name }
         }
+        $(
+            #[allow(non_upper_case_globals)]
+            pub const $konst: Keyword = Keyword {
+                ident: ast::Ident::with_empty_ctxt(ast::Name($index))
+            };
+        )*
     }
 
     fn mk_fresh_ident_interner() -> IdentInterner {
-        let mut init_vec = Vec::new();
-        $(init_vec.push($si_str);)*
-        $(init_vec.push($sk_str);)*
-        $(init_vec.push($rk_str);)*
-        interner::StrInterner::prefill(&init_vec[..])
+        interner::StrInterner::prefill(&[$($string,)*])
     }
 }}
 
-// If the special idents get renumbered, remember to modify these two as appropriate
-pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM);
-const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM);
-pub const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM);
-const SELF_TYPE_KEYWORD_NAME: ast::Name = ast::Name(SELF_TYPE_KEYWORD_NAME_NUM);
-
-pub const SELF_KEYWORD_NAME_NUM: u32 = 1;
-const STATIC_KEYWORD_NAME_NUM: u32 = 2;
-const SUPER_KEYWORD_NAME_NUM: u32 = 3;
-const SELF_TYPE_KEYWORD_NAME_NUM: u32 = 10;
-
 // NB: leaving holes in the ident table is bad! a different ident will get
 // interned with the id from the hole, but it will be between the min and max
 // of the reserved words, and thus tagged as "reserved".
-
-declare_special_idents_and_keywords! {
-    pub mod special_idents {
-        // These ones are statics
-        (0,                          invalid,                "");
-        (super::SELF_KEYWORD_NAME_NUM,   self_,              "self");
-        (super::STATIC_KEYWORD_NAME_NUM, statik,             "static");
-        (super::SUPER_KEYWORD_NAME_NUM, super_,              "super");
-        (4,                          static_lifetime,        "'static");
-
-        // for matcher NTs
-        (5,                          tt,                     "tt");
-        (6,                          matchers,               "matchers");
-
-        // outside of libsyntax
-        (7,                          clownshoe_abi,          "__rust_abi");
-        (8,                          opaque,                 "<opaque>");
-        (9,                          __unused1,              "<__unused1>");
-        (super::SELF_TYPE_KEYWORD_NAME_NUM, type_self,       "Self");
-        (11,                         prelude_import,         "prelude_import");
-        (12,                         DEFAULT,                "default");
-    }
-
-    pub mod keywords {
-        // These ones are variants of the Keyword enum
-
-        'strict:
-        (13,                         As,         "as");
-        (14,                         Break,      "break");
-        (15,                         Crate,      "crate");
-        (16,                         Else,       "else");
-        (17,                         Enum,       "enum");
-        (18,                         Extern,     "extern");
-        (19,                         False,      "false");
-        (20,                         Fn,         "fn");
-        (21,                         For,        "for");
-        (22,                         If,         "if");
-        (23,                         Impl,       "impl");
-        (24,                         In,         "in");
-        (25,                         Let,        "let");
-        (26,                         Loop,       "loop");
-        (27,                         Match,      "match");
-        (28,                         Mod,        "mod");
-        (29,                         Move,       "move");
-        (30,                         Mut,        "mut");
-        (31,                         Pub,        "pub");
-        (32,                         Ref,        "ref");
-        (33,                         Return,     "return");
-        // Static and Self are also special idents (prefill de-dupes)
-        (super::STATIC_KEYWORD_NAME_NUM, Static, "static");
-        (super::SELF_KEYWORD_NAME_NUM, SelfValue, "self");
-        (super::SELF_TYPE_KEYWORD_NAME_NUM, SelfType, "Self");
-        (34,                         Struct,     "struct");
-        (super::SUPER_KEYWORD_NAME_NUM, Super,   "super");
-        (35,                         True,       "true");
-        (36,                         Trait,      "trait");
-        (37,                         Type,       "type");
-        (38,                         Unsafe,     "unsafe");
-        (39,                         Use,        "use");
-        (40,                         While,      "while");
-        (41,                         Continue,   "continue");
-        (42,                         Box,        "box");
-        (43,                         Const,      "const");
-        (44,                         Where,      "where");
-        'reserved:
-        (45,                         Virtual,    "virtual");
-        (46,                         Proc,       "proc");
-        (47,                         Alignof,    "alignof");
-        (48,                         Become,     "become");
-        (49,                         Offsetof,   "offsetof");
-        (50,                         Priv,       "priv");
-        (51,                         Pure,       "pure");
-        (52,                         Sizeof,     "sizeof");
-        (53,                         Typeof,     "typeof");
-        (54,                         Unsized,    "unsized");
-        (55,                         Yield,      "yield");
-        (56,                         Do,         "do");
-        (57,                         Abstract,   "abstract");
-        (58,                         Final,      "final");
-        (59,                         Override,   "override");
-        (60,                         Macro,      "macro");
-    }
+// After modifying this list, adjust `is_strict_keyword`/`is_reserved_keyword`;
+// this should rarely be necessary, though, if the keywords are kept in alphabetical order.
+declare_keywords! {
+    // Invalid identifier
+    (0,  Invalid,        "")
+
+    // Strict keywords used in the language.
+    (1,  As,             "as")
+    (2,  Box,            "box")
+    (3,  Break,          "break")
+    (4,  Const,          "const")
+    (5,  Continue,       "continue")
+    (6,  Crate,          "crate")
+    (7,  Else,           "else")
+    (8,  Enum,           "enum")
+    (9,  Extern,         "extern")
+    (10, False,          "false")
+    (11, Fn,             "fn")
+    (12, For,            "for")
+    (13, If,             "if")
+    (14, Impl,           "impl")
+    (15, In,             "in")
+    (16, Let,            "let")
+    (17, Loop,           "loop")
+    (18, Match,          "match")
+    (19, Mod,            "mod")
+    (20, Move,           "move")
+    (21, Mut,            "mut")
+    (22, Pub,            "pub")
+    (23, Ref,            "ref")
+    (24, Return,         "return")
+    (25, SelfValue,      "self")
+    (26, SelfType,       "Self")
+    (27, Static,         "static")
+    (28, Struct,         "struct")
+    (29, Super,          "super")
+    (30, Trait,          "trait")
+    (31, True,           "true")
+    (32, Type,           "type")
+    (33, Unsafe,         "unsafe")
+    (34, Use,            "use")
+    (35, Where,          "where")
+    (36, While,          "while")
+
+    // Keywords reserved for future use.
+    (37, Abstract,       "abstract")
+    (38, Alignof,        "alignof")
+    (39, Become,         "become")
+    (40, Do,             "do")
+    (41, Final,          "final")
+    (42, Macro,          "macro")
+    (43, Offsetof,       "offsetof")
+    (44, Override,       "override")
+    (45, Priv,           "priv")
+    (46, Proc,           "proc")
+    (47, Pure,           "pure")
+    (48, Sizeof,         "sizeof")
+    (49, Typeof,         "typeof")
+    (50, Unsized,        "unsized")
+    (51, Virtual,        "virtual")
+    (52, Yield,          "yield")
+
+    // Weak keywords that have special meaning only in specific contexts.
+    (53, Default,        "default")
+    (54, StaticLifetime, "'static")
+    (55, Union,          "union")
 }
 
 // looks like we can get rid of this completely...
@@ -779,6 +641,6 @@ mod tests {
         assert!(Gt.mtwt_eq(&Gt));
         let a = str_to_ident("bac");
         let a1 = mark_ident(a,92);
-        assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain)));
+        assert!(Ident(a).mtwt_eq(&Ident(a1)));
     }
 }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 95f1b63168b47..7bfc58e85c259 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -20,7 +20,7 @@ use attr;
 use attr::{AttrMetaMethods, AttributeMethods};
 use codemap::{self, CodeMap, BytePos};
 use errors;
-use parse::token::{self, BinOpToken, Token, InternedString};
+use parse::token::{self, keywords, BinOpToken, Token, InternedString};
 use parse::lexer::comments;
 use parse;
 use print::pp::{self, break_offset, word, space, zerobreak, hardbreak};
@@ -270,14 +270,14 @@ pub fn token_to_string(tok: &Token) -> String {
         }
 
         /* Name components */
-        token::Ident(s, _)          => s.to_string(),
+        token::Ident(s)             => s.to_string(),
         token::Lifetime(s)          => s.to_string(),
         token::Underscore           => "_".to_string(),
 
         /* Other */
         token::DocComment(s)        => s.to_string(),
-        token::SubstNt(s, _)        => format!("${}", s),
-        token::MatchNt(s, t, _, _)  => format!("${}:{}", s, t),
+        token::SubstNt(s)           => format!("${}", s),
+        token::MatchNt(s, t)        => format!("${}:{}", s, t),
         token::Eof                  => "<eof>".to_string(),
         token::Whitespace           => " ".to_string(),
         token::Comment              => "/* */".to_string(),
@@ -294,7 +294,7 @@ pub fn token_to_string(tok: &Token) -> String {
             token::NtBlock(ref e)       => block_to_string(&e),
             token::NtStmt(ref e)        => stmt_to_string(&e),
             token::NtPat(ref e)         => pat_to_string(&e),
-            token::NtIdent(ref e, _)    => ident_to_string(e.node),
+            token::NtIdent(ref e)       => ident_to_string(e.node),
             token::NtTT(ref e)          => tt_to_string(&e),
             token::NtArm(ref e)         => arm_to_string(&e),
             token::NtImplItem(ref e)    => impl_item_to_string(&e),
@@ -995,7 +995,7 @@ impl<'a> State<'a> {
             ast::TyKind::BareFn(ref f) => {
                 let generics = ast::Generics {
                     lifetimes: f.lifetimes.clone(),
-                    ty_params: P::empty(),
+                    ty_params: P::new(),
                     where_clause: ast::WhereClause {
                         id: ast::DUMMY_NODE_ID,
                         predicates: Vec::new(),
@@ -1488,20 +1488,11 @@ impl<'a> State<'a> {
 
     pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> {
         self.ibox(0)?;
-        let mut suppress_space = false;
         for (i, tt) in tts.iter().enumerate() {
-            if i != 0 && !suppress_space {
+            if i != 0 {
                 space(&mut self.s)?;
             }
             self.print_tt(tt)?;
-            // There should be no space between the module name and the following `::` in paths,
-            // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701)
-            suppress_space = match *tt {
-                TokenTree::Token(_, token::Ident(_, token::ModName)) |
-                TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) |
-                TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true,
-                _ => false
-            }
         }
         self.end()
     }
@@ -2086,7 +2077,7 @@ impl<'a> State<'a> {
                 }
                 self.bclose_(expr.span, INDENT_UNIT)?;
             }
-            ast::ExprKind::Closure(capture_clause, ref decl, ref body) => {
+            ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
                 self.print_capture_clause(capture_clause)?;
 
                 self.print_fn_block_args(&decl)?;
@@ -2966,9 +2957,8 @@ impl<'a> State<'a> {
             ast::TyKind::Infer if is_closure => self.print_pat(&input.pat)?,
             _ => {
                 match input.pat.node {
-                    PatKind::Ident(_, ref path1, _) if
-                        path1.node.name ==
-                            parse::token::special_idents::invalid.name => {
+                    PatKind::Ident(_, ref path1, _)
+                            if path1.node.name == keywords::Invalid.name() => {
                         // Do nothing.
                     }
                     _ => {
@@ -3021,7 +3011,7 @@ impl<'a> State<'a> {
         }
         let generics = ast::Generics {
             lifetimes: Vec::new(),
-            ty_params: P::empty(),
+            ty_params: P::new(),
             where_clause: ast::WhereClause {
                 id: ast::DUMMY_NODE_ID,
                 predicates: Vec::new(),
diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs
index fda9741d35c41..9d04cb75daa0e 100644
--- a/src/libsyntax/ptr.rs
+++ b/src/libsyntax/ptr.rs
@@ -83,10 +83,10 @@ impl<T: 'static> P<T> {
     }
 }
 
-impl<T> Deref for P<T> {
+impl<T: ?Sized> Deref for P<T> {
     type Target = T;
 
-    fn deref<'a>(&'a self) -> &'a T {
+    fn deref(&self) -> &T {
         &self.ptr
     }
 }
@@ -97,11 +97,12 @@ impl<T: 'static + Clone> Clone for P<T> {
     }
 }
 
-impl<T: Debug> Debug for P<T> {
+impl<T: ?Sized + Debug> Debug for P<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        Debug::fmt(&**self, f)
+        Debug::fmt(&self.ptr, f)
     }
 }
+
 impl<T: Display> Display for P<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         Display::fmt(&**self, f)
@@ -126,19 +127,8 @@ impl<T: Encodable> Encodable for P<T> {
     }
 }
 
-
-impl<T:fmt::Debug> fmt::Debug for P<[T]> {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        self.ptr.fmt(fmt)
-    }
-}
-
 impl<T> P<[T]> {
     pub fn new() -> P<[T]> {
-        P::empty()
-    }
-
-    pub fn empty() -> P<[T]> {
         P { ptr: Default::default() }
     }
 
@@ -151,31 +141,11 @@ impl<T> P<[T]> {
     pub fn into_vec(self) -> Vec<T> {
         self.ptr.into_vec()
     }
-
-    pub fn as_slice<'a>(&'a self) -> &'a [T] {
-        &self.ptr
-    }
-
-    pub fn move_iter(self) -> vec::IntoIter<T> {
-        self.into_vec().into_iter()
-    }
-
-    pub fn map<U, F: FnMut(&T) -> U>(&self, f: F) -> P<[U]> {
-        self.iter().map(f).collect()
-    }
-}
-
-impl<T> Deref for P<[T]> {
-    type Target = [T];
-
-    fn deref(&self) -> &[T] {
-        self.as_slice()
-    }
 }
 
 impl<T> Default for P<[T]> {
     fn default() -> P<[T]> {
-        P::empty()
+        P::new()
     }
 }
 
diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs
index 9049b21d8b4bb..84a7b14484828 100644
--- a/src/libsyntax/std_inject.rs
+++ b/src/libsyntax/std_inject.rs
@@ -14,7 +14,7 @@ use codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
 use codemap;
 use fold::Folder;
 use fold;
-use parse::token::{intern, InternedString, special_idents};
+use parse::token::{intern, InternedString, keywords};
 use parse::{token, ParseSess};
 use ptr::P;
 use util::small_vector::SmallVector;
@@ -148,7 +148,7 @@ impl fold::Folder for PreludeInjector {
         let vp = P(codemap::dummy_spanned(ast::ViewPathGlob(prelude_path)));
         mod_.items.insert(0, P(ast::Item {
             id: ast::DUMMY_NODE_ID,
-            ident: special_idents::invalid,
+            ident: keywords::Invalid.ident(),
             node: ast::ItemKind::Use(vp),
             attrs: vec![ast::Attribute {
                 span: self.span,
@@ -157,7 +157,9 @@ impl fold::Folder for PreludeInjector {
                     style: ast::AttrStyle::Outer,
                     value: P(ast::MetaItem {
                         span: self.span,
-                        node: ast::MetaItemKind::Word(special_idents::prelude_import.name.as_str()),
+                        node: ast::MetaItemKind::Word(
+                            token::intern_and_get_ident("prelude_import")
+                        ),
                     }),
                     is_sugared_doc: false,
                 },
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs
index 703b1611540c8..8eeb61e0de46c 100644
--- a/src/libsyntax/test.rs
+++ b/src/libsyntax/test.rs
@@ -31,7 +31,7 @@ use ext::expand::ExpansionConfig;
 use fold::Folder;
 use util::move_map::MoveMap;
 use fold;
-use parse::token::{intern, InternedString};
+use parse::token::{intern, keywords, InternedString};
 use parse::{token, ParseSess};
 use print::pprust;
 use ast;
@@ -116,7 +116,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
 
     fn fold_item(&mut self, i: P<ast::Item>) -> SmallVector<P<ast::Item>> {
         let ident = i.ident;
-        if ident.name != token::special_idents::invalid.name {
+        if ident.name != keywords::Invalid.name() {
             self.cx.path.push(ident);
         }
         debug!("current path: {}", path_name_i(&self.cx.path));
@@ -160,7 +160,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
             ast::ItemKind::Mod(..) => fold::noop_fold_item(i, self),
             _ => SmallVector::one(i),
         };
-        if ident.name != token::special_idents::invalid.name {
+        if ident.name != keywords::Invalid.name() {
             self.cx.path.pop();
         }
         res
@@ -453,7 +453,7 @@ fn mk_std(cx: &TestCtxt) -> P<ast::Item> {
         (ast::ItemKind::Use(
             P(nospan(ast::ViewPathSimple(id_test,
                                          path_node(vec!(id_test)))))),
-         ast::Visibility::Public, token::special_idents::invalid)
+         ast::Visibility::Public, keywords::Invalid.ident())
     } else {
         (ast::ItemKind::ExternCrate(None), ast::Visibility::Inherited, id_test)
     };
@@ -545,7 +545,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
 
         P(ast::Item {
             id: ast::DUMMY_NODE_ID,
-            ident: token::special_idents::invalid,
+            ident: keywords::Invalid.ident(),
             attrs: vec![],
             node: ast::ItemKind::Use(P(use_path)),
             vis: ast::Visibility::Inherited,
@@ -590,7 +590,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
     let struct_type = ecx.ty_path(ecx.path(sp, vec![ecx.ident_of("self"),
                                                     ecx.ident_of("test"),
                                                     ecx.ident_of("TestDescAndFn")]));
-    let static_lt = ecx.lifetime(sp, token::special_idents::static_lifetime.name);
+    let static_lt = ecx.lifetime(sp, keywords::StaticLifetime.name());
     // &'static [self::test::TestDescAndFn]
     let static_type = ecx.ty_rptr(sp,
                                   ecx.ty(sp, ast::TyKind::Vec(struct_type)),
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 413017c727177..f50a480e5e55a 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -743,7 +743,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
             visitor.visit_expr(subexpression);
             walk_list!(visitor, visit_arm, arms);
         }
-        ExprKind::Closure(_, ref function_declaration, ref body) => {
+        ExprKind::Closure(_, ref function_declaration, ref body, _decl_span) => {
             visitor.visit_fn(FnKind::Closure,
                              function_declaration,
                              body,
diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs
index 85453f6dfcbc8..dce808756cf6a 100644
--- a/src/libsyntax_ext/concat_idents.rs
+++ b/src/libsyntax_ext/concat_idents.rs
@@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) => {
+                TokenTree::Token(_, token::Ident(ident)) => {
                     res_str.push_str(&ident.name.as_str())
                 },
                 _ => {
diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs
index b8ba1a58f2163..5251b0d08d449 100644
--- a/src/libsyntax_ext/deriving/generic/mod.rs
+++ b/src/libsyntax_ext/deriving/generic/mod.rs
@@ -201,8 +201,7 @@ use syntax::codemap::{self, DUMMY_SP};
 use syntax::codemap::Span;
 use syntax::errors::Handler;
 use syntax::util::move_map::MoveMap;
-use syntax::parse::token::{intern, InternedString};
-use syntax::parse::token::special_idents;
+use syntax::parse::token::{intern, keywords, InternedString};
 use syntax::ptr::P;
 
 use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};
@@ -526,7 +525,7 @@ impl<'a> TraitDef<'a> {
                         span: self.span,
                         bound_lifetimes: wb.bound_lifetimes.clone(),
                         bounded_ty: wb.bounded_ty.clone(),
-                        bounds: P::from_vec(wb.bounds.iter().cloned().collect())
+                        bounds: wb.bounds.iter().cloned().collect(),
                     })
                 }
                 ast::WherePredicate::RegionPredicate(ref rb) => {
@@ -596,9 +595,9 @@ impl<'a> TraitDef<'a> {
         let trait_ref = cx.trait_ref(trait_path);
 
         // Create the type parameters on the `self` path.
-        let self_ty_params = generics.ty_params.map(|ty_param| {
+        let self_ty_params = generics.ty_params.iter().map(|ty_param| {
             cx.ty_ident(self.span, ty_param.ident)
-        });
+        }).collect();
 
         let self_lifetimes: Vec<ast::Lifetime> =
             generics.lifetimes
@@ -609,7 +608,7 @@ impl<'a> TraitDef<'a> {
         // Create the type of `self`.
         let self_type = cx.ty_path(
             cx.path_all(self.span, false, vec!( type_ident ), self_lifetimes,
-                        self_ty_params.into_vec(), Vec::new()));
+                        self_ty_params, Vec::new()));
 
         let attr = cx.attribute(
             self.span,
@@ -635,7 +634,7 @@ impl<'a> TraitDef<'a> {
 
         cx.item(
             self.span,
-            special_idents::invalid,
+            keywords::Invalid.ident(),
             a,
             ast::ItemKind::Impl(unsafety,
                                 ast::ImplPolarity::Positive,
@@ -866,7 +865,7 @@ impl<'a> MethodDef<'a> {
             // creating fresh self id
             _ => Some(ast::Arg::new_self(trait_.span,
                                          ast::Mutability::Immutable,
-                                         special_idents::self_))
+                                         keywords::SelfValue.ident()))
         };
         let args = {
             let args = arg_types.into_iter().map(|(name, ty)| {
diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs
index a924cc0695377..e31d45d91a59f 100644
--- a/src/libsyntax_ext/deriving/generic/ty.rs
+++ b/src/libsyntax_ext/deriving/generic/ty.rs
@@ -19,7 +19,7 @@ use syntax::ast::{Expr,Generics,Ident};
 use syntax::ext::base::ExtCtxt;
 use syntax::ext::build::AstBuilder;
 use syntax::codemap::{Span,respan};
-use syntax::parse::token::special_idents;
+use syntax::parse::token::keywords;
 use syntax::ptr::P;
 
 /// The types of pointers
@@ -169,15 +169,14 @@ impl<'a> Ty<'a> {
                    -> ast::Path {
         match *self {
             Self_ => {
-                let self_params = self_generics.ty_params.map(|ty_param| {
+                let self_params = self_generics.ty_params.iter().map(|ty_param| {
                     cx.ty_ident(span, ty_param.ident)
-                });
+                }).collect();
                 let lifetimes = self_generics.lifetimes.iter()
                                                        .map(|d| d.lifetime)
                                                        .collect();
 
-                cx.path_all(span, false, vec!(self_ty), lifetimes,
-                            self_params.into_vec(), Vec::new())
+                cx.path_all(span, false, vec![self_ty], lifetimes, self_params, Vec::new())
             }
             Literal(ref p) => {
                 p.to_path(cx, span, self_ty, self_generics)
@@ -264,7 +263,7 @@ pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option<PtrTy>)
     let self_path = cx.expr_self(span);
     match *self_ptr {
         None => {
-            (self_path, respan(span, ast::SelfKind::Value(special_idents::self_)))
+            (self_path, respan(span, ast::SelfKind::Value(keywords::SelfValue.ident())))
         }
         Some(ref ptr) => {
             let self_ty = respan(
@@ -272,7 +271,7 @@ pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option<PtrTy>)
                 match *ptr {
                     Borrowed(ref lt, mutbl) => {
                         let lt = lt.map(|s| cx.lifetime(span, cx.ident_of(s).name));
-                        ast::SelfKind::Region(lt, mutbl, special_idents::self_)
+                        ast::SelfKind::Region(lt, mutbl, keywords::SelfValue.ident())
                     }
                     Raw(_) => cx.span_bug(span, "attempted to use *self in deriving definition")
                 });
diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index fd68ba7342798..6c61d6b914c56 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -19,8 +19,7 @@ use syntax::ext::base::*;
 use syntax::ext::base;
 use syntax::ext::build::AstBuilder;
 use syntax::fold::Folder;
-use syntax::parse::token::special_idents;
-use syntax::parse::token;
+use syntax::parse::token::{self, keywords};
 use syntax::ptr::P;
 
 use std::collections::HashMap;
@@ -106,7 +105,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
             named = true;
             let ident = match p.token {
-                token::Ident(i, _) => {
+                token::Ident(i) => {
                     p.bump();
                     i
                 }
@@ -449,7 +448,7 @@ impl<'a, 'b> Context<'a, 'b> {
         let sp = piece_ty.span;
         let ty = ecx.ty_rptr(sp,
             ecx.ty(sp, ast::TyKind::Vec(piece_ty)),
-            Some(ecx.lifetime(sp, special_idents::static_lifetime.name)),
+            Some(ecx.lifetime(sp, keywords::StaticLifetime.name())),
             ast::Mutability::Immutable);
         let slice = ecx.expr_vec_slice(sp, pieces);
         // static instead of const to speed up codegen by not requiring this to be inlined
@@ -475,7 +474,7 @@ impl<'a, 'b> Context<'a, 'b> {
 
         // First, build up the static array which will become our precompiled
         // format "string"
-        let static_lifetime = self.ecx.lifetime(self.fmtsp, special_idents::static_lifetime.name);
+        let static_lifetime = self.ecx.lifetime(self.fmtsp, keywords::StaticLifetime.name());
         let piece_ty = self.ecx.ty_rptr(
                 self.fmtsp,
                 self.ecx.ty_ident(self.fmtsp, self.ecx.ident_of("str")),
diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/auxiliary/roman_numerals.rs
index c262b0dba2553..839ece49c3eb5 100644
--- a/src/test/auxiliary/roman_numerals.rs
+++ b/src/test/auxiliary/roman_numerals.rs
@@ -48,7 +48,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
     }
 
     let text = match args[0] {
-        TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
+        TokenTree::Token(_, token::Ident(s)) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
diff --git a/src/test/compile-fail/fail-simple.rs b/src/test/compile-fail/fail-simple.rs
index 97b709592a9c9..e889d35477059 100644
--- a/src/test/compile-fail/fail-simple.rs
+++ b/src/test/compile-fail/fail-simple.rs
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-
-// error-pattern:unexpected token
 fn main() {
-  panic!(@);
+    panic!(@); //~ ERROR expected expression, found `@`
 }
diff --git a/src/test/compile-fail/import-ty-params.rs b/src/test/compile-fail/import-ty-params.rs
index 66d4d6d06211d..7344f31535fba 100644
--- a/src/test/compile-fail/import-ty-params.rs
+++ b/src/test/compile-fail/import-ty-params.rs
@@ -20,6 +20,6 @@ macro_rules! import {
     ($p: path) => (use $p;);
 }
 
-import! { a::b::c::S<u8> } //~ERROR type or lifetime parameter is found in import path
+import! { a::b::c::S<u8> } //~ERROR type or lifetime parameters in import path
 
 fn main() {}
diff --git a/src/test/compile-fail/issue-10636-2.rs b/src/test/compile-fail/issue-10636-2.rs
index 747252d59241e..beaf9e5059fa2 100644
--- a/src/test/compile-fail/issue-10636-2.rs
+++ b/src/test/compile-fail/issue-10636-2.rs
@@ -15,4 +15,4 @@ pub fn trace_option(option: Option<isize>) {
     option.map(|some| 42; //~ NOTE: unclosed delimiter
                           //~^ ERROR: expected one of
 } //~ ERROR: incorrect close delimiter
-//~^ ERROR: unexpected token
+//~^ ERROR: expected expression, found `)`
diff --git a/src/test/compile-fail/issue-31804.rs b/src/test/compile-fail/issue-31804.rs
index b6a04bee85d4f..cea52b11c5ded 100644
--- a/src/test/compile-fail/issue-31804.rs
+++ b/src/test/compile-fail/issue-31804.rs
@@ -13,4 +13,4 @@
 
 fn main() {
     let
-} //~ ERROR unexpected token: `}`
+} //~ ERROR expected pattern, found `}`
diff --git a/src/test/compile-fail/keyword-false-as-identifier.rs b/src/test/compile-fail/keyword-false-as-identifier.rs
index 60caca3da57ed..e8af94f16b1b9 100644
--- a/src/test/compile-fail/keyword-false-as-identifier.rs
+++ b/src/test/compile-fail/keyword-false-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -8,10 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// ignore-test -- FIXME #33010
-
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py false'
-
 fn main() {
-    let false = "foo"; //~ error: ident
+    let false = "foo"; //~ error: mismatched types
 }
diff --git a/src/test/compile-fail/keyword-true-as-identifier.rs b/src/test/compile-fail/keyword-true-as-identifier.rs
index 716a0ebf21cec..90414fa912dba 100644
--- a/src/test/compile-fail/keyword-true-as-identifier.rs
+++ b/src/test/compile-fail/keyword-true-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -8,10 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// ignore-test -- FIXME #33010
-
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py true'
-
 fn main() {
-    let true = "foo"; //~ error: ident
+    let true = "foo"; //~ error: mismatched types
 }
diff --git a/src/test/compile-fail/macro-context.rs b/src/test/compile-fail/macro-context.rs
index 8fa5e0a70890e..5d07f0747ff43 100644
--- a/src/test/compile-fail/macro-context.rs
+++ b/src/test/compile-fail/macro-context.rs
@@ -12,7 +12,7 @@
 
 // (typeof used because it's surprisingly hard to find an unparsed token after a stmt)
 macro_rules! m {
-    () => ( i ; typeof );   //~ ERROR `typeof` is a reserved keyword
+    () => ( i ; typeof );   //~ ERROR expected expression, found reserved keyword `typeof`
                             //~| ERROR macro expansion ignores token `typeof`
                             //~| ERROR macro expansion ignores token `;`
                             //~| ERROR macro expansion ignores token `;`
diff --git a/src/test/compile-fail/macro-incomplete-parse.rs b/src/test/compile-fail/macro-incomplete-parse.rs
index 0d5f9079649c4..8d515622e53ff 100644
--- a/src/test/compile-fail/macro-incomplete-parse.rs
+++ b/src/test/compile-fail/macro-incomplete-parse.rs
@@ -19,7 +19,7 @@ macro_rules! ignored_item {
 }
 
 macro_rules! ignored_expr {
-    () => ( 1,  //~ ERROR unexpected token: `,`
+    () => ( 1,  //~ ERROR expected expression, found `,`
             2 )
 }
 
diff --git a/src/libsyntax/owned_slice.rs b/src/test/compile-fail/privacy/restricted/ty-params.rs
similarity index 60%
rename from src/libsyntax/owned_slice.rs
rename to src/test/compile-fail/privacy/restricted/ty-params.rs
index 33a3d5785981a..04d8e9833045a 100644
--- a/src/libsyntax/owned_slice.rs
+++ b/src/test/compile-fail/privacy/restricted/ty-params.rs
@@ -1,4 +1,4 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -8,7 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/// A non-growable owned slice.
-#[unstable(feature = "rustc_private", issue = "0")]
-#[rustc_deprecated(since = "1.7.0", reason = "use `ptr::P<[T]>` instead")]
-pub type OwnedSlice<T> = ::ptr::P<[T]>;
+#![feature(pub_restricted)]
+
+macro_rules! m {
+    ($p: path) => (pub($p) struct Z;)
+}
+
+struct S<T>(T);
+m!{ S<u8> } //~ ERROR type or lifetime parameters in visibility path
+
+fn main() {}
diff --git a/src/test/compile-fail/qualified-path-params-2.rs b/src/test/compile-fail/qualified-path-params-2.rs
new file mode 100644
index 0000000000000..5c661bfcdc0c9
--- /dev/null
+++ b/src/test/compile-fail/qualified-path-params-2.rs
@@ -0,0 +1,31 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that qualified paths with type parameters
+// fail during type checking and not during parsing
+
+struct S;
+
+trait Tr {
+    type A;
+}
+
+impl Tr for S {
+    type A = S;
+}
+
+impl S {
+    fn f<T>() {}
+}
+
+type A = <S as Tr>::A::f<u8>; //~ ERROR type parameters are not allowed on this type
+//~^ ERROR ambiguous associated type; specify the type using the syntax `<<S as Tr>::A as Trait>::f`
+
+fn main() {}
diff --git a/src/test/compile-fail/qualified-path-params.rs b/src/test/compile-fail/qualified-path-params.rs
new file mode 100644
index 0000000000000..002080f4cb44c
--- /dev/null
+++ b/src/test/compile-fail/qualified-path-params.rs
@@ -0,0 +1,33 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that qualified paths with type parameters
+// fail during type checking and not during parsing
+
+struct S;
+
+trait Tr {
+    type A;
+}
+
+impl Tr for S {
+    type A = S;
+}
+
+impl S {
+    fn f<T>() {}
+}
+
+fn main() {
+    match 10 {
+        <S as Tr>::A::f::<u8> => {} //~ ERROR `f` is not an associated const
+        0 ... <S as Tr>::A::f::<u8> => {} //~ ERROR only char and numeric types are allowed in range
+    }
+}
diff --git a/src/test/compile-fail/reserved-become.rs b/src/test/compile-fail/reserved-become.rs
index 82e9ebc10d1c8..bcda61e363def 100644
--- a/src/test/compile-fail/reserved-become.rs
+++ b/src/test/compile-fail/reserved-become.rs
@@ -10,5 +10,5 @@
 
 fn main() {
     let become = 0;
-    //~^ ERROR `become` is a reserved keyword
+    //~^ ERROR expected pattern, found reserved keyword `become`
 }
diff --git a/src/test/compile-fail/self_type_keyword.rs b/src/test/compile-fail/self_type_keyword.rs
index b28f48bb1056e..b9c9d7a389b95 100644
--- a/src/test/compile-fail/self_type_keyword.rs
+++ b/src/test/compile-fail/self_type_keyword.rs
@@ -14,7 +14,7 @@ struct Self;
 //~^ ERROR expected identifier, found keyword `Self`
 
 struct Bar<'Self>;
-//~^ ERROR invalid lifetime name
+//~^ ERROR lifetimes cannot use keyword names
 
 pub fn main() {
     let Self = 5;
diff --git a/src/test/compile-fail/token-error-correct.rs b/src/test/compile-fail/token-error-correct.rs
index 6c54acd7bdbf6..f5fecf3e1740a 100644
--- a/src/test/compile-fail/token-error-correct.rs
+++ b/src/test/compile-fail/token-error-correct.rs
@@ -13,8 +13,10 @@
 fn main() {
     foo(bar(; //~ NOTE: unclosed delimiter
     //~^ NOTE: unclosed delimiter
-    //~^^ ERROR: unexpected token: `;`
+    //~^^ ERROR: expected expression, found `;`
     //~^^^ ERROR: unresolved name `bar`
     //~^^^^ ERROR: unresolved name `foo`
+    //~^^^^^ ERROR: expected one of `)`, `,`, `.`, `<`, `?`
 } //~ ERROR: incorrect close delimiter: `}`
 //~^ ERROR: incorrect close delimiter: `}`
+//~^^ ERROR: expected expression, found `)`
diff --git a/src/test/compile-fail/use-keyword.rs b/src/test/compile-fail/use-keyword.rs
new file mode 100644
index 0000000000000..040db0255678d
--- /dev/null
+++ b/src/test/compile-fail/use-keyword.rs
@@ -0,0 +1,23 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that imports with naked super and self don't fail during parsing
+// FIXME: this shouldn't fail during name resolution either
+
+mod a {
+    mod b {
+        use self as A; //~ ERROR `self` imports are only allowed within a { } list
+        //~^ ERROR unresolved import `self`. There is no `self` in the crate root
+        use super as B; //~ ERROR unresolved import `super`. There is no `super` in the crate root
+        use super::{self as C}; //~ERROR unresolved import `super`. There is no `super` in the crate
+    }
+}
+
+fn main() {}
diff --git a/src/test/parse-fail/use-mod-4.rs b/src/test/compile-fail/use-mod-4.rs
similarity index 80%
rename from src/test/parse-fail/use-mod-4.rs
rename to src/test/compile-fail/use-mod-4.rs
index bcafa4e9fde25..146d37f41d638 100644
--- a/src/test/parse-fail/use-mod-4.rs
+++ b/src/test/compile-fail/use-mod-4.rs
@@ -8,9 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z parse-only
-
-use foo::self;
-//~^ ERROR expected identifier, found keyword `self`
+use foo::self; //~ ERROR unresolved import `foo::self`
+//~^ ERROR `self` imports are only allowed within a { } list
 
 fn main() {}
diff --git a/src/test/compile-fail/vec-macro-with-comma-only.rs b/src/test/compile-fail/vec-macro-with-comma-only.rs
index 8c8e789cd9640..346cf1ec555d3 100644
--- a/src/test/compile-fail/vec-macro-with-comma-only.rs
+++ b/src/test/compile-fail/vec-macro-with-comma-only.rs
@@ -9,5 +9,5 @@
 // except according to those terms.
 
 pub fn main() {
-    vec!(,); //~ ERROR unexpected token
+    vec!(,); //~ ERROR expected expression, found `,`
 }
diff --git a/src/test/parse-fail/issue-10412.rs b/src/test/parse-fail/issue-10412.rs
index b75e7b12bbdc9..fc2598d1e9d2f 100644
--- a/src/test/parse-fail/issue-10412.rs
+++ b/src/test/parse-fail/issue-10412.rs
@@ -11,17 +11,17 @@
 // compile-flags: -Z parse-only -Z continue-parse-after-error
 
 
-trait Serializable<'self, T> { //~ ERROR no longer a special lifetime
-    fn serialize(val : &'self T) -> Vec<u8> ; //~ ERROR no longer a special lifetime
-    fn deserialize(repr : &[u8]) -> &'self T; //~ ERROR no longer a special lifetime
+trait Serializable<'self, T> { //~ ERROR lifetimes cannot use keyword names
+    fn serialize(val : &'self T) -> Vec<u8> ; //~ ERROR lifetimes cannot use keyword names
+    fn deserialize(repr : &[u8]) -> &'self T; //~ ERROR lifetimes cannot use keyword names
 }
 
-impl<'self> Serializable<str> for &'self str { //~ ERROR no longer a special lifetime
-    //~^ ERROR no longer a special lifetime
-    fn serialize(val : &'self str) -> Vec<u8> { //~ ERROR no longer a special lifetime
+impl<'self> Serializable<str> for &'self str { //~ ERROR lifetimes cannot use keyword names
+    //~^ ERROR lifetimes cannot use keyword names
+    fn serialize(val : &'self str) -> Vec<u8> { //~ ERROR lifetimes cannot use keyword names
         vec!(1)
     }
-    fn deserialize(repr: &[u8]) -> &'self str { //~ ERROR no longer a special lifetime
+    fn deserialize(repr: &[u8]) -> &'self str { //~ ERROR lifetimes cannot use keyword names
         "hi"
     }
 }
diff --git a/src/test/parse-fail/issue-14303-path.rs b/src/test/parse-fail/issue-14303-path.rs
index 7c30b5f26296b..431a917c2d9f4 100644
--- a/src/test/parse-fail/issue-14303-path.rs
+++ b/src/test/parse-fail/issue-14303-path.rs
@@ -12,4 +12,4 @@
 
 fn bar<'a, T>(x: mymodule::X<'a, T, 'b, 'c>) {}
 //~^ ERROR lifetime parameters must be declared prior to type parameters
-//~^^ ERROR unexpected token
+//~^^ ERROR expected pattern, found `'c`
diff --git a/src/test/parse-fail/issue-32505.rs b/src/test/parse-fail/issue-32505.rs
index e697e98bc0607..246941ff2597d 100644
--- a/src/test/parse-fail/issue-32505.rs
+++ b/src/test/parse-fail/issue-32505.rs
@@ -11,7 +11,7 @@
 // compile-flags: -Z parse-only -Z continue-parse-after-error
 
 pub fn test() {
-    foo(|_|) //~ ERROR unexpected token: `)`
+    foo(|_|) //~ ERROR expected expression, found `)`
 }
 
 fn main() { }
diff --git a/src/test/parse-fail/keyword-abstract.rs b/src/test/parse-fail/keyword-abstract.rs
index bd3fbbe79a88b..2db5a5c583ac3 100644
--- a/src/test/parse-fail/keyword-abstract.rs
+++ b/src/test/parse-fail/keyword-abstract.rs
@@ -11,5 +11,5 @@
 // compile-flags: -Z parse-only
 
 fn main() {
-    let abstract = (); //~ ERROR `abstract` is a reserved keyword
+    let abstract = (); //~ ERROR expected pattern, found reserved keyword `abstract`
 }
diff --git a/src/test/parse-fail/keyword-as-as-identifier.rs b/src/test/parse-fail/keyword-as-as-identifier.rs
index bc05a7c4f25fb..c6070c456e8a0 100644
--- a/src/test/parse-fail/keyword-as-as-identifier.rs
+++ b/src/test/parse-fail/keyword-as-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py as'
 
 fn main() {
-    let as = "foo"; //~ error: ident
+    let as = "foo"; //~ error: expected pattern, found keyword `as`
 }
diff --git a/src/test/parse-fail/lifetime-obsoleted-self.rs b/src/test/parse-fail/keyword-box-as-identifier.rs
similarity index 71%
rename from src/test/parse-fail/lifetime-obsoleted-self.rs
rename to src/test/parse-fail/keyword-box-as-identifier.rs
index e8b76750eb985..b5abe14dbe872 100644
--- a/src/test/parse-fail/lifetime-obsoleted-self.rs
+++ b/src/test/parse-fail/keyword-box-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -10,6 +10,6 @@
 
 // compile-flags: -Z parse-only
 
-fn baz(a: &'self isize) { } //~ ERROR invalid lifetime name: 'self is no longer a special lifetime
-
-fn main() { }
+fn main() {
+    let box = "foo"; //~ error: expected pattern, found `=`
+}
diff --git a/src/test/parse-fail/keyword-break-as-identifier.rs b/src/test/parse-fail/keyword-break-as-identifier.rs
index bd7527f399e59..65c775fa1b6cc 100644
--- a/src/test/parse-fail/keyword-break-as-identifier.rs
+++ b/src/test/parse-fail/keyword-break-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py break'
 
 fn main() {
-    let break = "foo"; //~ error: ident
+    let break = "foo"; //~ error: expected pattern, found keyword `break`
 }
diff --git a/src/test/parse-fail/keyword-const-as-identifier.rs b/src/test/parse-fail/keyword-const-as-identifier.rs
new file mode 100644
index 0000000000000..6ecf14957e32b
--- /dev/null
+++ b/src/test/parse-fail/keyword-const-as-identifier.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z parse-only
+
+// This file was auto-generated using 'src/etc/generate-keyword-tests.py const'
+
+fn main() {
+    let const = "foo"; //~ error: expected pattern, found keyword `const`
+}
diff --git a/src/test/parse-fail/keyword-continue-as-identifier.rs b/src/test/parse-fail/keyword-continue-as-identifier.rs
new file mode 100644
index 0000000000000..87377ac83642f
--- /dev/null
+++ b/src/test/parse-fail/keyword-continue-as-identifier.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z parse-only
+
+// This file was auto-generated using 'src/etc/generate-keyword-tests.py continue'
+
+fn main() {
+    let continue = "foo"; //~ error: expected pattern, found keyword `continue`
+}
diff --git a/src/test/parse-fail/keyword-crate-as-identifier.rs b/src/test/parse-fail/keyword-crate-as-identifier.rs
new file mode 100644
index 0000000000000..8a914ca7b178c
--- /dev/null
+++ b/src/test/parse-fail/keyword-crate-as-identifier.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z parse-only
+
+// This file was auto-generated using 'src/etc/generate-keyword-tests.py crate'
+
+fn main() {
+    let crate = "foo"; //~ error: expected pattern, found keyword `crate`
+}
diff --git a/src/test/parse-fail/keyword-else-as-identifier.rs b/src/test/parse-fail/keyword-else-as-identifier.rs
index 24bd18a738fbe..6878f7fea039d 100644
--- a/src/test/parse-fail/keyword-else-as-identifier.rs
+++ b/src/test/parse-fail/keyword-else-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py else'
 
 fn main() {
-    let else = "foo"; //~ error: ident
+    let else = "foo"; //~ error: expected pattern, found keyword `else`
 }
diff --git a/src/test/parse-fail/keyword-enum-as-identifier.rs b/src/test/parse-fail/keyword-enum-as-identifier.rs
index e474527041428..042a02d79e003 100644
--- a/src/test/parse-fail/keyword-enum-as-identifier.rs
+++ b/src/test/parse-fail/keyword-enum-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py enum'
 
 fn main() {
-    let enum = "foo"; //~ error: ident
+    let enum = "foo"; //~ error: expected pattern, found keyword `enum`
 }
diff --git a/src/test/parse-fail/keyword-extern-as-identifier.rs b/src/test/parse-fail/keyword-extern-as-identifier.rs
index 579cd9f916652..3bbe24ed56c63 100644
--- a/src/test/parse-fail/keyword-extern-as-identifier.rs
+++ b/src/test/parse-fail/keyword-extern-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py extern'
 
 fn main() {
-    let extern = "foo"; //~ error: ident
+    let extern = "foo"; //~ error: expected pattern, found keyword `extern`
 }
diff --git a/src/test/parse-fail/keyword-final.rs b/src/test/parse-fail/keyword-final.rs
index 4b06312d0993b..be29a739443e6 100644
--- a/src/test/parse-fail/keyword-final.rs
+++ b/src/test/parse-fail/keyword-final.rs
@@ -11,5 +11,5 @@
 // compile-flags: -Z parse-only
 
 fn main() {
-    let final = (); //~ ERROR `final` is a reserved keyword
+    let final = (); //~ ERROR expected pattern, found reserved keyword `final`
 }
diff --git a/src/test/parse-fail/keyword-fn-as-identifier.rs b/src/test/parse-fail/keyword-fn-as-identifier.rs
index 0ace9ddf1f074..0d454f67d1c05 100644
--- a/src/test/parse-fail/keyword-fn-as-identifier.rs
+++ b/src/test/parse-fail/keyword-fn-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py fn'
 
 fn main() {
-    let fn = "foo"; //~ error: ident
+    let fn = "foo"; //~ error: expected pattern, found keyword `fn`
 }
diff --git a/src/test/parse-fail/keyword-for-as-identifier.rs b/src/test/parse-fail/keyword-for-as-identifier.rs
index 035c87b80bb9c..d341669f7272b 100644
--- a/src/test/parse-fail/keyword-for-as-identifier.rs
+++ b/src/test/parse-fail/keyword-for-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py for'
 
 fn main() {
-    let for = "foo"; //~ error: ident
+    let for = "foo"; //~ error: expected pattern, found keyword `for`
 }
diff --git a/src/test/parse-fail/keyword-if-as-identifier.rs b/src/test/parse-fail/keyword-if-as-identifier.rs
index 1aad0a780f968..417e40425e03b 100644
--- a/src/test/parse-fail/keyword-if-as-identifier.rs
+++ b/src/test/parse-fail/keyword-if-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py if'
 
 fn main() {
-    let if = "foo"; //~ error: ident
+    let if = "foo"; //~ error: expected pattern, found keyword `if`
 }
diff --git a/src/test/parse-fail/keyword-impl-as-identifier.rs b/src/test/parse-fail/keyword-impl-as-identifier.rs
index 585109505d408..fe97c191f683c 100644
--- a/src/test/parse-fail/keyword-impl-as-identifier.rs
+++ b/src/test/parse-fail/keyword-impl-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py impl'
 
 fn main() {
-    let impl = "foo"; //~ error: ident
+    let impl = "foo"; //~ error: expected pattern, found keyword `impl`
 }
diff --git a/src/test/parse-fail/keyword-in-as-identifier.rs b/src/test/parse-fail/keyword-in-as-identifier.rs
new file mode 100644
index 0000000000000..c0f9396b98100
--- /dev/null
+++ b/src/test/parse-fail/keyword-in-as-identifier.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z parse-only
+
+// This file was auto-generated using 'src/etc/generate-keyword-tests.py in'
+
+fn main() {
+    let in = "foo"; //~ error: expected pattern, found keyword `in`
+}
diff --git a/src/test/parse-fail/keyword-let-as-identifier.rs b/src/test/parse-fail/keyword-let-as-identifier.rs
index 07bc790168608..5d6dca78d7830 100644
--- a/src/test/parse-fail/keyword-let-as-identifier.rs
+++ b/src/test/parse-fail/keyword-let-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py let'
 
 fn main() {
-    let let = "foo"; //~ error: ident
+    let let = "foo"; //~ error: expected pattern, found keyword `let`
 }
diff --git a/src/test/parse-fail/keyword-loop-as-identifier.rs b/src/test/parse-fail/keyword-loop-as-identifier.rs
index 7b2b10a2d6dde..7c3d11d67f61c 100644
--- a/src/test/parse-fail/keyword-loop-as-identifier.rs
+++ b/src/test/parse-fail/keyword-loop-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py loop'
 
 fn main() {
-    let loop = "foo"; //~ error: ident
+    let loop = "foo"; //~ error: expected pattern, found keyword `loop`
 }
diff --git a/src/test/parse-fail/keyword-match-as-identifier.rs b/src/test/parse-fail/keyword-match-as-identifier.rs
index 528873c179432..7c727f44da764 100644
--- a/src/test/parse-fail/keyword-match-as-identifier.rs
+++ b/src/test/parse-fail/keyword-match-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py match'
 
 fn main() {
-    let match = "foo"; //~ error: ident
+    let match = "foo"; //~ error: expected pattern, found keyword `match`
 }
diff --git a/src/test/parse-fail/keyword-mod-as-identifier.rs b/src/test/parse-fail/keyword-mod-as-identifier.rs
index b29bcbc76c3cd..85b4cc2e02c6a 100644
--- a/src/test/parse-fail/keyword-mod-as-identifier.rs
+++ b/src/test/parse-fail/keyword-mod-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py mod'
 
 fn main() {
-    let mod = "foo"; //~ error: ident
+    let mod = "foo"; //~ error: expected pattern, found keyword `mod`
 }
diff --git a/src/test/parse-fail/keyword-move-as-identifier.rs b/src/test/parse-fail/keyword-move-as-identifier.rs
new file mode 100644
index 0000000000000..b785ac0058ccb
--- /dev/null
+++ b/src/test/parse-fail/keyword-move-as-identifier.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z parse-only
+
+// This file was auto-generated using 'src/etc/generate-keyword-tests.py move'
+
+fn main() {
+    let move = "foo"; //~ error: expected pattern, found keyword `move`
+}
diff --git a/src/test/parse-fail/keyword-mut-as-identifier.rs b/src/test/parse-fail/keyword-mut-as-identifier.rs
index b637d07d8b6d0..0aeca9b34ab8c 100644
--- a/src/test/parse-fail/keyword-mut-as-identifier.rs
+++ b/src/test/parse-fail/keyword-mut-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -10,8 +10,6 @@
 
 // compile-flags: -Z parse-only
 
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py mut'
-
 fn main() {
-    let mut = "foo"; //~ error: ident
+    let mut = "foo"; //~ error: expected identifier, found `=`
 }
diff --git a/src/test/parse-fail/keyword-override.rs b/src/test/parse-fail/keyword-override.rs
index 3f79e437189c2..60333762b33e0 100644
--- a/src/test/parse-fail/keyword-override.rs
+++ b/src/test/parse-fail/keyword-override.rs
@@ -11,5 +11,5 @@
 // compile-flags: -Z parse-only
 
 fn main() {
-    let override = (); //~ ERROR `override` is a reserved keyword
+    let override = (); //~ ERROR expected pattern, found reserved keyword `override`
 }
diff --git a/src/test/parse-fail/keyword-pub-as-identifier.rs b/src/test/parse-fail/keyword-pub-as-identifier.rs
index 959bbfbf88264..9233728697026 100644
--- a/src/test/parse-fail/keyword-pub-as-identifier.rs
+++ b/src/test/parse-fail/keyword-pub-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py pub'
 
 fn main() {
-    let pub = "foo"; //~ error: ident
+    let pub = "foo"; //~ error: expected pattern, found keyword `pub`
 }
diff --git a/src/test/parse-fail/keyword-ref-as-identifier.rs b/src/test/parse-fail/keyword-ref-as-identifier.rs
index 3db6d11c2e89c..a689c4eeea413 100644
--- a/src/test/parse-fail/keyword-ref-as-identifier.rs
+++ b/src/test/parse-fail/keyword-ref-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -10,8 +10,6 @@
 
 // compile-flags: -Z parse-only
 
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py ref'
-
 fn main() {
-    let ref = "foo"; //~ error: ident
+    let ref = "foo"; //~ error: expected identifier, found `=`
 }
diff --git a/src/test/parse-fail/keyword-return-as-identifier.rs b/src/test/parse-fail/keyword-return-as-identifier.rs
index df8aeba6d7164..bcf7f13754315 100644
--- a/src/test/parse-fail/keyword-return-as-identifier.rs
+++ b/src/test/parse-fail/keyword-return-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py return'
 
 fn main() {
-    let return = "foo"; //~ error: ident
+    let return = "foo"; //~ error: expected pattern, found keyword `return`
 }
diff --git a/src/test/parse-fail/keyword-self-as-identifier.rs b/src/test/parse-fail/keyword-self-as-identifier.rs
index 0e0d07ca6a537..f8b93a1796bfe 100644
--- a/src/test/parse-fail/keyword-self-as-identifier.rs
+++ b/src/test/parse-fail/keyword-self-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -10,8 +10,6 @@
 
 // compile-flags: -Z parse-only
 
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py self'
-
 fn main() {
-    let self = "foo"; //~ error: ident
+    let Self = "foo"; //~ error: expected identifier, found keyword `Self`
 }
diff --git a/src/test/parse-fail/keyword-static-as-identifier.rs b/src/test/parse-fail/keyword-static-as-identifier.rs
index d5b529af4b80e..793262266a39f 100644
--- a/src/test/parse-fail/keyword-static-as-identifier.rs
+++ b/src/test/parse-fail/keyword-static-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py static'
 
 fn main() {
-    let static = "foo"; //~ error: ident
+    let static = "foo"; //~ error: expected pattern, found keyword `static`
 }
diff --git a/src/test/parse-fail/keyword-struct-as-identifier.rs b/src/test/parse-fail/keyword-struct-as-identifier.rs
index 7d2160dfd2937..591bd25db65da 100644
--- a/src/test/parse-fail/keyword-struct-as-identifier.rs
+++ b/src/test/parse-fail/keyword-struct-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py struct'
 
 fn main() {
-    let struct = "foo"; //~ error: ident
+    let struct = "foo"; //~ error: expected pattern, found keyword `struct`
 }
diff --git a/src/test/parse-fail/keyword-super-as-identifier.rs b/src/test/parse-fail/keyword-super-as-identifier.rs
index 4d86691886865..a48683a4f54dc 100644
--- a/src/test/parse-fail/keyword-super-as-identifier.rs
+++ b/src/test/parse-fail/keyword-super-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -10,8 +10,6 @@
 
 // compile-flags: -Z parse-only
 
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py super'
-
 fn main() {
-    let super = "foo"; //~ error: ident
+    let super = "foo"; //~ error: expected identifier, found keyword `super`
 }
diff --git a/src/test/parse-fail/keyword-trait-as-identifier.rs b/src/test/parse-fail/keyword-trait-as-identifier.rs
index 7a8be0baa27f5..bdb5d264b031e 100644
--- a/src/test/parse-fail/keyword-trait-as-identifier.rs
+++ b/src/test/parse-fail/keyword-trait-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py trait'
 
 fn main() {
-    let trait = "foo"; //~ error: ident
+    let trait = "foo"; //~ error: expected pattern, found keyword `trait`
 }
diff --git a/src/test/parse-fail/keyword-type-as-identifier.rs b/src/test/parse-fail/keyword-type-as-identifier.rs
index c76bea89ab40d..2ba99d098deef 100644
--- a/src/test/parse-fail/keyword-type-as-identifier.rs
+++ b/src/test/parse-fail/keyword-type-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py type'
 
 fn main() {
-    let type = "foo"; //~ error: ident
+    let type = "foo"; //~ error: expected pattern, found keyword `type`
 }
diff --git a/src/test/parse-fail/keyword-typeof.rs b/src/test/parse-fail/keyword-typeof.rs
index 29ec4f5844b1e..40e26bd375ae5 100644
--- a/src/test/parse-fail/keyword-typeof.rs
+++ b/src/test/parse-fail/keyword-typeof.rs
@@ -11,5 +11,5 @@
 // compile-flags: -Z parse-only
 
 fn main() {
-    let typeof = (); //~ ERROR `typeof` is a reserved keyword
+    let typeof = (); //~ ERROR expected pattern, found reserved keyword `typeof`
 }
diff --git a/src/test/parse-fail/keyword-unsafe-as-identifier.rs b/src/test/parse-fail/keyword-unsafe-as-identifier.rs
index d3c48c6ded0d0..a72723e566dde 100644
--- a/src/test/parse-fail/keyword-unsafe-as-identifier.rs
+++ b/src/test/parse-fail/keyword-unsafe-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py unsafe'
 
 fn main() {
-    let unsafe = "foo"; //~ error: ident
+    let unsafe = "foo"; //~ error: expected pattern, found keyword `unsafe`
 }
diff --git a/src/test/parse-fail/keyword-use-as-identifier.rs b/src/test/parse-fail/keyword-use-as-identifier.rs
index d3815c650a36f..de74907ff2097 100644
--- a/src/test/parse-fail/keyword-use-as-identifier.rs
+++ b/src/test/parse-fail/keyword-use-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py use'
 
 fn main() {
-    let use = "foo"; //~ error: ident
+    let use = "foo"; //~ error: expected pattern, found keyword `use`
 }
diff --git a/src/test/parse-fail/keyword-where-as-identifier.rs b/src/test/parse-fail/keyword-where-as-identifier.rs
new file mode 100644
index 0000000000000..4b7c8920b13ca
--- /dev/null
+++ b/src/test/parse-fail/keyword-where-as-identifier.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z parse-only
+
+// This file was auto-generated using 'src/etc/generate-keyword-tests.py where'
+
+fn main() {
+    let where = "foo"; //~ error: expected pattern, found keyword `where`
+}
diff --git a/src/test/parse-fail/keyword-while-as-identifier.rs b/src/test/parse-fail/keyword-while-as-identifier.rs
index 331fdc07cc15c..01793caa38a8a 100644
--- a/src/test/parse-fail/keyword-while-as-identifier.rs
+++ b/src/test/parse-fail/keyword-while-as-identifier.rs
@@ -1,4 +1,4 @@
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -13,5 +13,5 @@
 // This file was auto-generated using 'src/etc/generate-keyword-tests.py while'
 
 fn main() {
-    let while = "foo"; //~ error: ident
+    let while = "foo"; //~ error: expected pattern, found keyword `while`
 }
diff --git a/src/test/parse-fail/keywords-followed-by-double-colon.rs b/src/test/parse-fail/keywords-followed-by-double-colon.rs
index 5e27d3e4f383e..bb8a1dfdb1906 100644
--- a/src/test/parse-fail/keywords-followed-by-double-colon.rs
+++ b/src/test/parse-fail/keywords-followed-by-double-colon.rs
@@ -12,5 +12,5 @@
 
 fn main() {
     struct::foo();  //~ ERROR expected identifier
-    mut::baz(); //~ ERROR expected identifier
+    mut::baz(); //~ ERROR expected expression, found keyword `mut`
 }
diff --git a/src/test/parse-fail/lifetime-no-keyword.rs b/src/test/parse-fail/lifetime-no-keyword.rs
index 84b02e6ba0964..9ca81d9918ef3 100644
--- a/src/test/parse-fail/lifetime-no-keyword.rs
+++ b/src/test/parse-fail/lifetime-no-keyword.rs
@@ -12,6 +12,7 @@
 
 fn foo<'a>(a: &'a isize) { }
 fn bar(a: &'static isize) { }
-fn baz(a: &'let isize) { } //~ ERROR invalid lifetime name
+fn baz(a: &'let isize) { } //~ ERROR lifetimes cannot use keyword names
+fn zab(a: &'self isize) { } //~ ERROR lifetimes cannot use keyword names
 
 fn main() { }
diff --git a/src/test/parse-fail/match-arrows-block-then-binop.rs b/src/test/parse-fail/match-arrows-block-then-binop.rs
index 3026e159a4439..e8cfb77f05963 100644
--- a/src/test/parse-fail/match-arrows-block-then-binop.rs
+++ b/src/test/parse-fail/match-arrows-block-then-binop.rs
@@ -14,6 +14,6 @@ fn main() {
 
     match 0 {
       0 => {
-      } + 5 //~ ERROR unexpected token: `+`
+      } + 5 //~ ERROR expected pattern, found `+`
     }
 }
diff --git a/src/test/parse-fail/obsolete-proc.rs b/src/test/parse-fail/obsolete-proc.rs
index 1ef8cd2714d7f..648c46d246cbc 100644
--- a/src/test/parse-fail/obsolete-proc.rs
+++ b/src/test/parse-fail/obsolete-proc.rs
@@ -12,9 +12,8 @@
 
 // Test that we generate obsolete syntax errors around usages of `proc`.
 
-fn foo(p: proc()) { } //~ ERROR `proc` is a reserved keyword
+fn foo(p: proc()) { } //~ ERROR expected type, found reserved keyword `proc`
 
-fn bar() { proc() 1; } //~ ERROR `proc` is a reserved keyword
-                       //~^ ERROR expected
+fn bar() { proc() 1; } //~ ERROR expected expression, found reserved keyword `proc`
 
 fn main() { }
diff --git a/src/test/parse-fail/removed-syntax-mode.rs b/src/test/parse-fail/removed-syntax-mode.rs
index 4dafc36e912c5..6e99f8b3eeadc 100644
--- a/src/test/parse-fail/removed-syntax-mode.rs
+++ b/src/test/parse-fail/removed-syntax-mode.rs
@@ -10,4 +10,4 @@
 
 // compile-flags: -Z parse-only
 
-fn f(+x: isize) {} //~ ERROR unexpected token: `+`
+fn f(+x: isize) {} //~ ERROR expected pattern, found `+`
diff --git a/src/test/parse-fail/removed-syntax-mut-vec-expr.rs b/src/test/parse-fail/removed-syntax-mut-vec-expr.rs
index 301bd0e8b1c9c..7e5bd27b497cb 100644
--- a/src/test/parse-fail/removed-syntax-mut-vec-expr.rs
+++ b/src/test/parse-fail/removed-syntax-mut-vec-expr.rs
@@ -11,7 +11,5 @@
 // compile-flags: -Z parse-only
 
 fn f() {
-    let v = [mut 1, 2, 3, 4];
-    //~^  ERROR expected identifier, found keyword `mut`
-    //~^^ ERROR expected one of `!`, `,`, `.`, `::`, `;`, `?`, `]`, `{`, or an operator, found `1`
+    let v = [mut 1, 2, 3, 4]; //~ ERROR expected expression, found keyword `mut`
 }
diff --git a/src/test/parse-fail/removed-syntax-mut-vec-ty.rs b/src/test/parse-fail/removed-syntax-mut-vec-ty.rs
index 91918f01bb03e..0cdf1981a231b 100644
--- a/src/test/parse-fail/removed-syntax-mut-vec-ty.rs
+++ b/src/test/parse-fail/removed-syntax-mut-vec-ty.rs
@@ -10,6 +10,4 @@
 
 // compile-flags: -Z parse-only
 
-type v = [mut isize];
-    //~^  ERROR expected identifier, found keyword `mut`
-    //~^^ ERROR expected one of `!`, `(`, `+`, `::`, `;`, `<`, or `]`, found `isize`
+type v = [mut isize]; //~ ERROR expected type, found keyword `mut`
diff --git a/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs b/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs
index 2f637cf0b4e76..b16c77ab6b5be 100644
--- a/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs
+++ b/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs
@@ -11,7 +11,5 @@
 // compile-flags: -Z parse-only
 
 fn f() {
-    let a_box = box mut 42;
-    //~^  ERROR expected identifier, found keyword `mut`
-    //~^^ ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `42`
+    let a_box = box mut 42; //~ ERROR expected expression, found keyword `mut`
 }
diff --git a/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs b/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs
index e1637901266e0..9bd8dc9b11b21 100644
--- a/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs
+++ b/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs
@@ -10,6 +10,4 @@
 
 // compile-flags: -Z parse-only
 
-type mut_box = Box<mut isize>;
-    //~^  ERROR expected identifier, found keyword `mut`
-    //~^^ ERROR expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `isize`
+type mut_box = Box<mut isize>; //~ ERROR expected type, found keyword `mut`
diff --git a/src/test/parse-fail/unsized2.rs b/src/test/parse-fail/unsized2.rs
index a4a4c0dcfd916..f3af8740be948 100644
--- a/src/test/parse-fail/unsized2.rs
+++ b/src/test/parse-fail/unsized2.rs
@@ -15,8 +15,5 @@
 fn f<X>() {}
 
 pub fn main() {
-    f<type>();
-    //~^ ERROR expected identifier, found keyword `type`
-    //~^^ ERROR: chained comparison
-    //~^^^ HELP: use `::<
+    f<type>(); //~ ERROR expected expression, found keyword `type`
 }
diff --git a/src/test/run-pass/use-keyword-2.rs b/src/test/run-pass/use-keyword-2.rs
new file mode 100644
index 0000000000000..60016f5959469
--- /dev/null
+++ b/src/test/run-pass/use-keyword-2.rs
@@ -0,0 +1,30 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct A;
+
+mod test {
+    pub use super :: A;
+
+    pub use self :: A as B;
+}
+
+impl A {
+    fn f() {}
+    fn g() {
+        Self :: f()
+    }
+}
+
+fn main() {
+    let a: A = test::A;
+    let b: A = test::B;
+    let c: () = A::g();
+}