diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs
index 0fdb6dc068dd9..6504c1232ff09 100644
--- a/src/librustc/dep_graph/graph.rs
+++ b/src/librustc/dep_graph/graph.rs
@@ -8,6 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use errors::DiagnosticBuilder;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
                                            StableHashingContextProvider};
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -568,6 +569,24 @@ impl DepGraph {
                       "DepGraph::try_mark_green() - Duplicate fingerprint \
                       insertion for {:?}", dep_node);
 
+        // ... emitting any stored diagnostics ...
+        {
+            let diagnostics = tcx.on_disk_query_result_cache
+                                 .load_diagnostics(prev_dep_node_index);
+
+            if diagnostics.len() > 0 {
+                let handle = tcx.sess.diagnostic();
+
+                // Promote the previous diagnostics to the current session.
+                tcx.on_disk_query_result_cache
+                   .store_diagnostics(dep_node_index, diagnostics.clone());
+
+                for diagnostic in diagnostics {
+                    DiagnosticBuilder::new_diagnostic(handle, diagnostic).emit();
+                }
+            }
+        }
+
         // ... and finally storing a "Green" entry in the color map.
         let old_color = data.colors
                             .borrow_mut()
diff --git a/src/librustc/dep_graph/mod.rs b/src/librustc/dep_graph/mod.rs
index fe0212423f6ef..a472183698abf 100644
--- a/src/librustc/dep_graph/mod.rs
+++ b/src/librustc/dep_graph/mod.rs
@@ -26,4 +26,4 @@ pub use self::prev::PreviousDepGraph;
 pub use self::query::DepGraphQuery;
 pub use self::safe::AssertDepGraphSafe;
 pub use self::safe::DepGraphSafe;
-pub use self::serialized::SerializedDepGraph;
+pub use self::serialized::{SerializedDepGraph, SerializedDepNodeIndex};
diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs
index 64fe4626d6ee6..a81d314ac84dc 100644
--- a/src/librustc/lib.rs
+++ b/src/librustc/lib.rs
@@ -46,6 +46,7 @@
 #![feature(const_fn)]
 #![feature(core_intrinsics)]
 #![feature(i128_type)]
+#![feature(inclusive_range_syntax)]
 #![cfg_attr(windows, feature(libc))]
 #![feature(never_type)]
 #![feature(nonzero)]
diff --git a/src/librustc/ty/codec.rs b/src/librustc/ty/codec.rs
new file mode 100644
index 0000000000000..1c793920bf2e4
--- /dev/null
+++ b/src/librustc/ty/codec.rs
@@ -0,0 +1,243 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This module contains some shared code for encoding and decoding various
+// things from the `ty` module, and in particular implements support for
+// "shorthands" which allow to have pointers back into the already encoded
+// stream instead of re-encoding the same thing twice.
+//
+// The functionality in here is shared between persisting to crate metadata and
+// persisting to incr. comp. caches.
+
+use hir::def_id::{DefId, CrateNum};
+use middle::const_val::ByteArray;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_serialize::{Decodable, Decoder, Encoder, Encodable};
+use std::hash::Hash;
+use std::intrinsics;
+use ty::{self, Ty, TyCtxt};
+use ty::subst::Substs;
+
+/// The shorthand encoding uses the same `usize` space as an enum's variant
+/// index and is offset by this value so it never matches a real variant.
+/// This offset is also chosen so that the first byte is never < 0x80.
+pub const SHORTHAND_OFFSET: usize = 0x80;
+
+pub trait EncodableWithShorthand: Clone + Eq + Hash {
+    type Variant: Encodable;
+    fn variant(&self) -> &Self::Variant;
+}
+
+impl<'tcx> EncodableWithShorthand for Ty<'tcx> {
+    type Variant = ty::TypeVariants<'tcx>;
+    fn variant(&self) -> &Self::Variant {
+        &self.sty
+    }
+}
+
+impl<'tcx> EncodableWithShorthand for ty::Predicate<'tcx> {
+    type Variant = ty::Predicate<'tcx>;
+    fn variant(&self) -> &Self::Variant {
+        self
+    }
+}
+
+pub trait TyEncoder: Encoder {
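+    /// The current byte offset in the output stream; shorthands record this
+    /// position so they can point back at the full encoding.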
+    fn position(&self) -> usize;
+}
+
+/// Encode the given value or a previously cached shorthand.
+pub fn encode_with_shorthand<E, T, M>(encoder: &mut E,
+                                      value: &T,
+                                      cache: M)
+                                      -> Result<(), E::Error>
+    where E: TyEncoder,
+          M: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap<T, usize>,
+          T: EncodableWithShorthand,
+{
+    let existing_shorthand = cache(encoder).get(value).cloned();
+    if let Some(shorthand) = existing_shorthand {
+        return encoder.emit_usize(shorthand);
+    }
+
+    let variant = value.variant();
+
+    let start = encoder.position();
+    variant.encode(encoder)?;
+    let len = encoder.position() - start;
+
+    // The shorthand encoding uses the same usize as the
+    // discriminant, with an offset so they can't conflict.
+    let discriminant = unsafe { intrinsics::discriminant_value(variant) };
+    assert!(discriminant < SHORTHAND_OFFSET as u64);
+    let shorthand = start + SHORTHAND_OFFSET;
+
+    // Get the number of bits that leb128 could fit
+    // in the same space as the fully encoded type.
+    let leb128_bits = len * 7;
+
+    // Check that the shorthand is not longer than the
+    // full encoding itself, i.e. it's an obvious win.
+    if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
+        cache(encoder).insert(value.clone(), shorthand);
+    }
+
+    Ok(())
+}
+
+pub fn encode_predicates<'tcx, E, C>(encoder: &mut E,
+                                     predicates: &ty::GenericPredicates<'tcx>,
+                                     cache: C)
+                                     -> Result<(), E::Error>
+    where E: TyEncoder,
+          C: for<'b> Fn(&'b mut E) -> &'b mut FxHashMap<ty::Predicate<'tcx>, usize>,
+{
+    predicates.parent.encode(encoder)?;
+    predicates.predicates.len().encode(encoder)?;
+    for predicate in &predicates.predicates {
+        encode_with_shorthand(encoder, predicate, &cache)?
+    }
+    Ok(())
+}
+
+pub trait TyDecoder<'a, 'tcx: 'a>: Decoder {
+
+    fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
+
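+    /// Return the next byte in the underlying data without advancing the
+    /// decoder.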
+    fn peek_byte(&self) -> u8;
+
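+    /// Return the `Ty` that has already been decoded for the given shorthand
+    /// position, or decode it via `or_insert_with` and cache the result.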
+    fn cached_ty_for_shorthand<F>(&mut self,
+                                  shorthand: usize,
+                                  or_insert_with: F)
+                                  -> Result<Ty<'tcx>, Self::Error>
+        where F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>;
+
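+    /// Run `f` with the decoder temporarily repositioned at `pos`, restoring
+    /// the previous position afterwards.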
+    fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
+        where F: FnOnce(&mut Self) -> R;
+
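+    /// Map a `CrateNum` as it was encoded to the `CrateNum` it corresponds to
+    /// in the current compilation session.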
+    fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum;
+
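+    /// Return true if the value at the current position is a shorthand
+    /// reference, i.e. its first byte has the `SHORTHAND_OFFSET` bit set.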
+    fn positioned_at_shorthand(&self) -> bool {
+        (self.peek_byte() & (SHORTHAND_OFFSET as u8)) != 0
+    }
+}
+
+pub fn decode_cnum<'a, 'tcx, D>(decoder: &mut D) -> Result<CrateNum, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    let cnum = CrateNum::from_u32(u32::decode(decoder)?);
+    Ok(decoder.map_encoded_cnum_to_current(cnum))
+}
+
+pub fn decode_ty<'a, 'tcx, D>(decoder: &mut D) -> Result<Ty<'tcx>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    // Handle shorthands first, if we have a usize > 0x80.
+    if decoder.positioned_at_shorthand() {
+        let pos = decoder.read_usize()?;
+        assert!(pos >= SHORTHAND_OFFSET);
+        let shorthand = pos - SHORTHAND_OFFSET;
+
+        decoder.cached_ty_for_shorthand(shorthand, |decoder| {
+            decoder.with_position(shorthand, Ty::decode)
+        })
+    } else {
+        let tcx = decoder.tcx();
+        Ok(tcx.mk_ty(ty::TypeVariants::decode(decoder)?))
+    }
+}
+
+pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D)
+                                      -> Result<ty::GenericPredicates<'tcx>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    Ok(ty::GenericPredicates {
+        parent: Decodable::decode(decoder)?,
+        predicates: (0..decoder.read_usize()?).map(|_| {
+                // Handle shorthands first, if we have a usize > 0x80.
+                if decoder.positioned_at_shorthand() {
+                    let pos = decoder.read_usize()?;
+                    assert!(pos >= SHORTHAND_OFFSET);
+                    let shorthand = pos - SHORTHAND_OFFSET;
+
+                    decoder.with_position(shorthand, ty::Predicate::decode)
+                } else {
+                    ty::Predicate::decode(decoder)
+                }
+            })
+            .collect::<Result<Vec<_>, _>>()?,
+    })
+}
+
+pub fn decode_substs<'a, 'tcx, D>(decoder: &mut D) -> Result<&'tcx Substs<'tcx>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    let len = decoder.read_usize()?;
+    let tcx = decoder.tcx();
+    Ok(tcx.mk_substs((0..len).map(|_| Decodable::decode(decoder)))?)
+}
+
+pub fn decode_region<'a, 'tcx, D>(decoder: &mut D) -> Result<ty::Region<'tcx>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    Ok(decoder.tcx().mk_region(Decodable::decode(decoder)?))
+}
+
+pub fn decode_ty_slice<'a, 'tcx, D>(decoder: &mut D)
+                                    -> Result<&'tcx ty::Slice<Ty<'tcx>>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    let len = decoder.read_usize()?;
+    Ok(decoder.tcx().mk_type_list((0..len).map(|_| Decodable::decode(decoder)))?)
+}
+
+pub fn decode_adt_def<'a, 'tcx, D>(decoder: &mut D)
+                                   -> Result<&'tcx ty::AdtDef, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    let def_id = DefId::decode(decoder)?;
+    Ok(decoder.tcx().adt_def(def_id))
+}
+
+pub fn decode_existential_predicate_slice<'a, 'tcx, D>(decoder: &mut D)
+    -> Result<&'tcx ty::Slice<ty::ExistentialPredicate<'tcx>>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    let len = decoder.read_usize()?;
+    Ok(decoder.tcx()
+              .mk_existential_predicates((0..len).map(|_| Decodable::decode(decoder)))?)
+}
+
+pub fn decode_byte_array<'a, 'tcx, D>(decoder: &mut D)
+                                      -> Result<ByteArray<'tcx>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    Ok(ByteArray {
+        data: decoder.tcx().alloc_byte_array(&Vec::decode(decoder)?)
+    })
+}
+
+pub fn decode_const<'a, 'tcx, D>(decoder: &mut D)
+                                 -> Result<&'tcx ty::Const<'tcx>, D::Error>
+    where D: TyDecoder<'a, 'tcx>,
+          'tcx: 'a,
+{
+    Ok(decoder.tcx().mk_const(Decodable::decode(decoder)?))
+}
diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs
index 5e9396068c8b6..c577cf4af3d07 100644
--- a/src/librustc/ty/context.rs
+++ b/src/librustc/ty/context.rs
@@ -853,6 +853,11 @@ pub struct GlobalCtxt<'tcx> {
 
     pub dep_graph: DepGraph,
 
+    /// This provides access to the incr. comp. on-disk cache for query results.
+    /// Do not access this directly. It is only meant to be used by
+    /// `DepGraph::try_mark_green()` and the query infrastructure in `ty::maps`.
+    pub(crate) on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
+
     /// Common types, pre-interned for your convenience.
     pub types: CommonTypes<'tcx>,
 
@@ -1054,6 +1059,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                                   resolutions: ty::Resolutions,
                                   named_region_map: resolve_lifetime::NamedRegionMap,
                                   hir: hir_map::Map<'tcx>,
+                                  on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
                                   crate_name: &str,
                                   tx: mpsc::Sender<Box<Any + Send>>,
                                   output_filenames: &OutputFilenames,
@@ -1137,6 +1143,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             global_arenas: arenas,
             global_interners: interners,
             dep_graph: dep_graph.clone(),
+            on_disk_query_result_cache,
             types: common_types,
             named_region_map: NamedRegionMap {
                 defs,
@@ -1298,6 +1305,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             self.in_scope_traits_map(def_index);
         }
     }
+
+    pub fn serialize_query_result_cache<E>(self,
+                                           encoder: &mut E)
+                                           -> Result<(), E::Error>
+        where E: ::rustc_serialize::Encoder
+    {
+        self.on_disk_query_result_cache.serialize(encoder)
+    }
+
 }
 
 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
diff --git a/src/librustc/ty/maps/mod.rs b/src/librustc/ty/maps/mod.rs
index e588cdc52d886..527142ff38bac 100644
--- a/src/librustc/ty/maps/mod.rs
+++ b/src/librustc/ty/maps/mod.rs
@@ -70,6 +70,9 @@ mod config;
 pub use self::config::QueryConfig;
 use self::config::QueryDescription;
 
+mod on_disk_cache;
+pub use self::on_disk_cache::OnDiskCache;
+
 // Each of these maps also corresponds to a method on a
 // `Provider` trait for requesting a value of that type,
 // and a method on `Maps` itself for doing that in a
diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs
new file mode 100644
index 0000000000000..26581501234af
--- /dev/null
+++ b/src/librustc/ty/maps/on_disk_cache.rs
@@ -0,0 +1,231 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use dep_graph::{DepNodeIndex, SerializedDepNodeIndex};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::indexed_vec::Idx;
+use errors::Diagnostic;
+use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,
+                      SpecializedDecoder};
+use session::Session;
+use std::borrow::Cow;
+use std::cell::RefCell;
+use std::collections::BTreeMap;
+use std::mem;
+use syntax::codemap::{CodeMap, StableFilemapId};
+use syntax_pos::{BytePos, Span, NO_EXPANSION, DUMMY_SP};
+
+/// `OnDiskCache` provides an interface to incr. comp. data cached from the
+/// previous compilation session. This data will eventually include the results
+/// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and
+/// any diagnostics that have been emitted during a query.
+pub struct OnDiskCache<'sess> {
+    // The diagnostics emitted during the previous compilation session.
+    prev_diagnostics: FxHashMap<SerializedDepNodeIndex, Vec<Diagnostic>>,
+
+    // This field collects all Diagnostics emitted during the current
+    // compilation session.
+    current_diagnostics: RefCell<FxHashMap<DepNodeIndex, Vec<Diagnostic>>>,
+
+    // This will eventually be needed for creating Decoders that can rebase
+    // spans.
+    _prev_filemap_starts: BTreeMap<BytePos, StableFilemapId>,
+    codemap: &'sess CodeMap,
+}
+
+// This type is used only for (de-)serialization.
+#[derive(RustcEncodable, RustcDecodable)]
+struct Header {
+    prev_filemap_starts: BTreeMap<BytePos, StableFilemapId>,
+}
+
+// This type is used only for (de-)serialization.
+#[derive(RustcEncodable, RustcDecodable)]
+struct Body {
+    diagnostics: Vec<(SerializedDepNodeIndex, Vec<Diagnostic>)>,
+}
+
+impl<'sess> OnDiskCache<'sess> {
+    /// Create a new OnDiskCache instance from the serialized data in `data`.
+    /// Note that the current implementation (which only deals with diagnostics
+    /// so far) will eagerly deserialize the complete cache. Once we are
+    /// dealing with larger amounts of data (i.e. cached query results),
+    /// deserialization will need to happen lazily.
+    pub fn new(sess: &'sess Session, data: &[u8]) -> OnDiskCache<'sess> {
+        debug_assert!(sess.opts.incremental.is_some());
+
+        let mut decoder = opaque::Decoder::new(&data[..], 0);
+        let header = Header::decode(&mut decoder).unwrap();
+
+        let prev_diagnostics: FxHashMap<_, _> = {
+            let mut decoder = CacheDecoder {
+                opaque: decoder,
+                codemap: sess.codemap(),
+                prev_filemap_starts: &header.prev_filemap_starts,
+            };
+            let body = Body::decode(&mut decoder).unwrap();
+            body.diagnostics.into_iter().collect()
+        };
+
+        OnDiskCache {
+            prev_diagnostics,
+            _prev_filemap_starts: header.prev_filemap_starts,
+            codemap: sess.codemap(),
+            current_diagnostics: RefCell::new(FxHashMap()),
+        }
+    }
+
+    pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> {
+        OnDiskCache {
+            prev_diagnostics: FxHashMap(),
+            _prev_filemap_starts: BTreeMap::new(),
+            codemap,
+            current_diagnostics: RefCell::new(FxHashMap()),
+        }
+    }
+
+    pub fn serialize<'a, 'tcx, E>(&self,
+                                  encoder: &mut E)
+                                  -> Result<(), E::Error>
+        where E: Encoder
+    {
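+        // Record where each filemap of the current session starts. The next
+        // session decodes this table (the `Header`) first and uses it to
+        // re-base the spans stored in the body.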
+        let prev_filemap_starts: BTreeMap<_, _> = self
+            .codemap
+            .files()
+            .iter()
+            .map(|fm| (fm.start_pos, StableFilemapId::new(fm)))
+            .collect();
+
+        Header { prev_filemap_starts }.encode(encoder)?;
+
+        let diagnostics: Vec<(SerializedDepNodeIndex, Vec<Diagnostic>)> =
+            self.current_diagnostics
+                .borrow()
+                .iter()
+                .map(|(k, v)| (SerializedDepNodeIndex::new(k.index()), v.clone()))
+                .collect();
+
+        Body { diagnostics }.encode(encoder)?;
+
+        Ok(())
+    }
+
+    /// Load the diagnostics emitted during the previous compilation session
+    /// for the given dep node.
+    pub fn load_diagnostics(&self,
+                            dep_node_index: SerializedDepNodeIndex)
+                            -> Vec<Diagnostic> {
+        self.prev_diagnostics.get(&dep_node_index).cloned().unwrap_or(vec![])
+    }
+
+    /// Store the diagnostics emitted when computing the given dep node during
+    /// the current compilation session.
+    /// Anything stored like this will be available via `load_diagnostics` in
+    /// the next compilation session.
+    pub fn store_diagnostics(&self,
+                             dep_node_index: DepNodeIndex,
+                             diagnostics: Vec<Diagnostic>) {
+        let mut current_diagnostics = self.current_diagnostics.borrow_mut();
+        let prev = current_diagnostics.insert(dep_node_index, diagnostics);
+        debug_assert!(prev.is_none());
+    }
+
+    /// Store diagnostics emitted during the computation of an anonymous query.
+    /// Since many anonymous queries can share the same `DepNode`, we aggregate
+    /// them -- as opposed to regular queries where we assume that there is a
+    /// 1:1 relationship between query-key and `DepNode`.
+    pub fn store_diagnostics_for_anon_node(&self,
+                                           dep_node_index: DepNodeIndex,
+                                           mut diagnostics: Vec<Diagnostic>) {
+        let mut current_diagnostics = self.current_diagnostics.borrow_mut();
+
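+        // If there is no entry for this node yet, move `diagnostics` into the
+        // map (leaving the local Vec empty, so the `extend` below is a no-op).
+        // Otherwise append to the existing entry.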
+        let x = current_diagnostics.entry(dep_node_index).or_insert_with(|| {
+            mem::replace(&mut diagnostics, Vec::new())
+        });
+
+        x.extend(diagnostics.into_iter());
+    }
+}
+
+/// A decoder that can read the incr. comp. cache. It is similar to the one
+/// we use for crate metadata decoding in that it can rebase spans and
+/// eventually will also handle things that contain `Ty` instances.
+struct CacheDecoder<'a> {
+    opaque: opaque::Decoder<'a>,
+    codemap: &'a CodeMap,
+    prev_filemap_starts: &'a BTreeMap<BytePos, StableFilemapId>,
+}
+
+impl<'a> CacheDecoder<'a> {
+    fn find_filemap_prev_bytepos(&self,
+                                 prev_bytepos: BytePos)
+                                 -> Option<(BytePos, StableFilemapId)> {
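+        // Walk the filemap starts at or below `prev_bytepos` from the back;
+        // the first (i.e. greatest) one is the filemap that contained
+        // `prev_bytepos` in the previous session.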
+        for (start, id) in self.prev_filemap_starts.range(BytePos(0) ... prev_bytepos).rev() {
+            return Some((*start, *id))
+        }
+
+        None
+    }
+}
+
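+// Forward each primitive `read_*` method of the `Decoder` trait to the
+// embedded `opaque::Decoder`.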
+macro_rules! decoder_methods {
+    ($($name:ident -> $ty:ty;)*) => {
+        $(fn $name(&mut self) -> Result<$ty, Self::Error> {
+            self.opaque.$name()
+        })*
+    }
+}
+
+impl<'sess> Decoder for CacheDecoder<'sess> {
+    type Error = String;
+
+    decoder_methods! {
+        read_nil -> ();
+
+        read_u128 -> u128;
+        read_u64 -> u64;
+        read_u32 -> u32;
+        read_u16 -> u16;
+        read_u8 -> u8;
+        read_usize -> usize;
+
+        read_i128 -> i128;
+        read_i64 -> i64;
+        read_i32 -> i32;
+        read_i16 -> i16;
+        read_i8 -> i8;
+        read_isize -> isize;
+
+        read_bool -> bool;
+        read_f64 -> f64;
+        read_f32 -> f32;
+        read_char -> char;
+        read_str -> Cow<str>;
+    }
+
+    fn error(&mut self, err: &str) -> Self::Error {
+        self.opaque.error(err)
+    }
+}
+
+impl<'a> SpecializedDecoder<Span> for CacheDecoder<'a> {
+    fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
+        let lo = BytePos::decode(self)?;
+        let hi = BytePos::decode(self)?;
+
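+        // `lo` and `hi` are offsets into the previous session's codemap. Find
+        // the filemap they belonged to and translate them into the
+        // corresponding filemap of the current session; if that filemap cannot
+        // be found anymore, degrade to DUMMY_SP.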
+        if let Some((prev_filemap_start, filemap_id)) = self.find_filemap_prev_bytepos(lo) {
+            if let Some(current_filemap) = self.codemap.filemap_by_stable_id(filemap_id) {
+                let lo = (lo + current_filemap.start_pos) - prev_filemap_start;
+                let hi = (hi + current_filemap.start_pos) - prev_filemap_start;
+                return Ok(Span::new(lo, hi, NO_EXPANSION));
+            }
+        }
+
+        Ok(DUMMY_SP)
+    }
+}
diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs
index cce968177175b..732adcefcdea1 100644
--- a/src/librustc/ty/maps/plumbing.rs
+++ b/src/librustc/ty/maps/plumbing.rs
@@ -13,14 +13,14 @@
 //! provider, manage the caches, and so forth.
 
 use dep_graph::{DepNodeIndex, DepNode, DepKind, DepNodeColor};
-use errors::{Diagnostic, DiagnosticBuilder};
+use errors::DiagnosticBuilder;
 use ty::{TyCtxt};
 use ty::maps::Query; // NB: actually generated by the macros in this file
 use ty::maps::config::QueryDescription;
 use ty::item_path;
 
 use rustc_data_structures::fx::{FxHashMap};
-use std::cell::{RefMut, Cell};
+use std::cell::RefMut;
 use std::marker::PhantomData;
 use std::mem;
 use syntax_pos::Span;
@@ -33,34 +33,19 @@ pub(super) struct QueryMap<D: QueryDescription> {
 pub(super) struct QueryValue<T> {
     pub(super) value: T,
     pub(super) index: DepNodeIndex,
-    pub(super) diagnostics: Option<Box<QueryDiagnostics>>,
 }
 
 impl<T> QueryValue<T> {
     pub(super) fn new(value: T,
-                      dep_node_index: DepNodeIndex,
-                      diagnostics: Vec<Diagnostic>)
+                      dep_node_index: DepNodeIndex)
                       -> QueryValue<T> {
         QueryValue {
             value,
             index: dep_node_index,
-            diagnostics: if diagnostics.len() == 0 {
-                None
-            } else {
-                Some(Box::new(QueryDiagnostics {
-                    diagnostics,
-                    emitted_diagnostics: Cell::new(true),
-                }))
-            },
         }
     }
 }
 
-pub(super) struct QueryDiagnostics {
-    pub(super) diagnostics: Vec<Diagnostic>,
-    pub(super) emitted_diagnostics: Cell<bool>,
-}
-
 impl<M: QueryDescription> QueryMap<M> {
     pub(super) fn new() -> QueryMap<M> {
         QueryMap {
@@ -284,16 +269,6 @@ macro_rules! define_maps {
                 );
 
                 if let Some(value) = tcx.maps.$name.borrow().map.get(&key) {
-                    if let Some(ref d) = value.diagnostics {
-                        if !d.emitted_diagnostics.get() {
-                            d.emitted_diagnostics.set(true);
-                            let handle = tcx.sess.diagnostic();
-                            for diagnostic in d.diagnostics.iter() {
-                                DiagnosticBuilder::new_diagnostic(handle, diagnostic.clone())
-                                    .emit();
-                            }
-                        }
-                    }
                     profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
                     tcx.dep_graph.read_index(value.index);
                     return Ok((&value.value).clone());
@@ -331,7 +306,11 @@ macro_rules! define_maps {
                     let ((result, dep_node_index), diagnostics) = res;
 
                     tcx.dep_graph.read_index(dep_node_index);
-                    let value = QueryValue::new(result, dep_node_index, diagnostics);
+
+                    tcx.on_disk_query_result_cache
+                       .store_diagnostics_for_anon_node(dep_node_index, diagnostics);
+
+                    let value = QueryValue::new(result, dep_node_index);
 
                     return Ok((&tcx.maps
                                     .$name
@@ -398,8 +377,11 @@ macro_rules! define_maps {
             {
                 debug_assert!(tcx.dep_graph.is_green(dep_node_index));
 
-                // We don't do any caching yet, so recompute
-                let (result, diagnostics) = tcx.cycle_check(span, Query::$name(key), || {
+                // We don't do any caching yet, so recompute.
+                // The diagnostics for this query have already been promoted to
+                // the current session during try_mark_green(), so we can ignore
+                // them here.
+                let (result, _) = tcx.cycle_check(span, Query::$name(key), || {
                     tcx.sess.diagnostic().track_diagnostics(|| {
                         // The dep-graph for this computation is already in place
                         tcx.dep_graph.with_ignore(|| {
@@ -412,7 +394,7 @@ macro_rules! define_maps {
                     tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true);
                 }
 
-                let value = QueryValue::new(result, dep_node_index, diagnostics);
+                let value = QueryValue::new(result, dep_node_index);
 
                 Ok((&tcx.maps
                          .$name
@@ -447,7 +429,12 @@ macro_rules! define_maps {
                     tcx.dep_graph.mark_loaded_from_cache(dep_node_index, false);
                 }
 
-                let value = QueryValue::new(result, dep_node_index, diagnostics);
+                if dep_node.kind != ::dep_graph::DepKind::Null {
+                    tcx.on_disk_query_result_cache
+                       .store_diagnostics(dep_node_index, diagnostics);
+                }
+
+                let value = QueryValue::new(result, dep_node_index);
 
                 Ok(((&tcx.maps
                          .$name
diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs
index 129c81c5cd61f..99885e3e637cc 100644
--- a/src/librustc/ty/mod.rs
+++ b/src/librustc/ty/mod.rs
@@ -89,6 +89,7 @@ pub use self::maps::queries;
 pub mod adjustment;
 pub mod binding;
 pub mod cast;
+pub mod codec;
 pub mod error;
 mod erase_regions;
 pub mod fast_reject;
diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs
index 7dbf93da38598..6cbf17f459c5d 100644
--- a/src/librustc_driver/driver.rs
+++ b/src/librustc_driver/driver.rs
@@ -941,6 +941,10 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
 
     let time_passes = sess.time_passes();
 
+    let query_result_on_disk_cache = time(time_passes,
+        "load query result cache",
+        || rustc_incremental::load_query_result_cache(sess));
+
     let named_region_map = time(time_passes,
                                 "lifetime resolution",
                                 || middle::resolve_lifetime::krate(sess, cstore, &hir_map))?;
@@ -1049,6 +1053,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
                              resolutions,
                              named_region_map,
                              hir_map,
+                             query_result_on_disk_cache,
                              name,
                              tx,
                              output_filenames,
diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs
index 6de36820f0c19..d86d51f374722 100644
--- a/src/librustc_driver/test.rs
+++ b/src/librustc_driver/test.rs
@@ -23,6 +23,7 @@ use rustc::middle::resolve_lifetime;
 use rustc::ty::subst::{Kind, Subst};
 use rustc::traits::{ObligationCause, Reveal};
 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
+use rustc::ty::maps::OnDiskCache;
 use rustc::infer::{self, InferOk, InferResult};
 use rustc::infer::type_variable::TypeVariableOrigin;
 use rustc_metadata::cstore::CStore;
@@ -156,6 +157,7 @@ fn test_env<F>(source_string: &str,
                              resolutions,
                              named_region_map.unwrap(),
                              hir_map,
+                             OnDiskCache::new_empty(sess.codemap()),
                              "test_crate",
                              tx,
                              &outputs,
diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs
index 0294adb3f5deb..e82c2897d2132 100644
--- a/src/librustc_incremental/lib.rs
+++ b/src/librustc_incremental/lib.rs
@@ -17,6 +17,9 @@
 
 #![feature(rand)]
 #![feature(conservative_impl_trait)]
+#![feature(i128_type)]
+#![feature(inclusive_range_syntax)]
+#![feature(specialization)]
 
 extern crate graphviz;
 #[macro_use] extern crate rustc;
@@ -31,8 +34,9 @@ mod assert_dep_graph;
 mod persist;
 
 pub use assert_dep_graph::assert_dep_graph;
-pub use persist::load_dep_graph;
 pub use persist::dep_graph_tcx_init;
+pub use persist::load_dep_graph;
+pub use persist::load_query_result_cache;
 pub use persist::save_dep_graph;
 pub use persist::save_trans_partition;
 pub use persist::save_work_products;
diff --git a/src/librustc_incremental/persist/fs.rs b/src/librustc_incremental/persist/fs.rs
index d53ee5c804f60..3c0090c4b4572 100644
--- a/src/librustc_incremental/persist/fs.rs
+++ b/src/librustc_incremental/persist/fs.rs
@@ -131,6 +131,7 @@ const LOCK_FILE_EXT: &'static str = ".lock";
 const DEP_GRAPH_FILENAME: &'static str = "dep-graph.bin";
 const WORK_PRODUCTS_FILENAME: &'static str = "work-products.bin";
 const METADATA_HASHES_FILENAME: &'static str = "metadata.bin";
+const QUERY_CACHE_FILENAME: &'static str = "query-cache.bin";
 
 // We encode integers using the following base, so they are shorter than decimal
 // or hexadecimal numbers (we want short file and directory names). Since these
@@ -150,6 +151,10 @@ pub fn metadata_hash_export_path(sess: &Session) -> PathBuf {
     in_incr_comp_dir_sess(sess, METADATA_HASHES_FILENAME)
 }
 
+pub fn query_cache_path(sess: &Session) -> PathBuf {
+    in_incr_comp_dir_sess(sess, QUERY_CACHE_FILENAME)
+}
+
 pub fn lock_file_path(session_dir: &Path) -> PathBuf {
     let crate_dir = session_dir.parent().unwrap();
 
diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs
index 63cfbcac1452e..158e9f2677a72 100644
--- a/src/librustc_incremental/persist/load.rs
+++ b/src/librustc_incremental/persist/load.rs
@@ -15,6 +15,7 @@ use rustc::hir::svh::Svh;
 use rustc::ich::Fingerprint;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
+use rustc::ty::maps::OnDiskCache;
 use rustc::util::nodemap::DefIdMap;
 use rustc_serialize::Decodable as RustcDecodable;
 use rustc_serialize::opaque::Decoder;
@@ -195,3 +196,15 @@ pub fn load_dep_graph(sess: &Session) -> PreviousDepGraph {
         empty
     }
 }
+
+pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess> {
+    if sess.opts.incremental.is_none() {
+        return OnDiskCache::new_empty(sess.codemap());
+    }
+
+    if let Some(bytes) = load_data(sess, &query_cache_path(sess)) {
+        OnDiskCache::new(sess, &bytes[..])
+    } else {
+        OnDiskCache::new_empty(sess.codemap())
+    }
+}
diff --git a/src/librustc_incremental/persist/mod.rs b/src/librustc_incremental/persist/mod.rs
index 88d49e7aedca7..82a43d85bc608 100644
--- a/src/librustc_incremental/persist/mod.rs
+++ b/src/librustc_incremental/persist/mod.rs
@@ -23,8 +23,9 @@ mod file_format;
 pub use self::fs::prepare_session_directory;
 pub use self::fs::finalize_session_directory;
 pub use self::fs::in_incr_comp_dir;
-pub use self::load::load_dep_graph;
 pub use self::load::dep_graph_tcx_init;
+pub use self::load::load_dep_graph;
+pub use self::load::load_query_result_cache;
 pub use self::save::save_dep_graph;
 pub use self::save::save_work_products;
 pub use self::work_product::save_trans_partition;
diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs
index b9f73500e273b..711550c27d16f 100644
--- a/src/librustc_incremental/persist/save.rs
+++ b/src/librustc_incremental/persist/save.rs
@@ -63,6 +63,12 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            e));
     }
 
+    time(sess.time_passes(), "persist query result cache", || {
+        save_in(sess,
+                query_cache_path(sess),
+                |e| encode_query_cache(tcx, e));
+    });
+
     time(sess.time_passes(), "persist dep-graph", || {
         save_in(sess,
                 dep_graph_path(sess),
@@ -298,3 +304,9 @@ fn encode_work_products(dep_graph: &DepGraph,
 
     work_products.encode(encoder)
 }
+
+fn encode_query_cache(tcx: TyCtxt,
+                      encoder: &mut Encoder)
+                      -> io::Result<()> {
+    tcx.serialize_query_result_cache(encoder)
+}
diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs
index 65cf15e5a0ec7..fed0f526033fc 100644
--- a/src/librustc_metadata/decoder.rs
+++ b/src/librustc_metadata/decoder.rs
@@ -25,6 +25,7 @@ use rustc::ich::Fingerprint;
 use rustc::middle::lang_items;
 use rustc::session::Session;
 use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::codec::{self as ty_codec, TyDecoder};
 use rustc::ty::subst::Substs;
 use rustc::util::nodemap::DefIdSet;
 
@@ -143,16 +144,6 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
         self.cdata.expect("missing CrateMetadata in DecodeContext")
     }
 
-    fn with_position<F: FnOnce(&mut Self) -> R, R>(&mut self, pos: usize, f: F) -> R {
-        let new_opaque = opaque::Decoder::new(self.opaque.data, pos);
-        let old_opaque = mem::replace(&mut self.opaque, new_opaque);
-        let old_state = mem::replace(&mut self.lazy_state, LazyState::NoNode);
-        let r = f(self);
-        self.opaque = old_opaque;
-        self.lazy_state = old_state;
-        r
-    }
-
     fn read_lazy_distance(&mut self, min_size: usize) -> Result<usize, <Self as Decoder>::Error> {
         let distance = self.read_usize()?;
         let position = match self.lazy_state {
@@ -208,6 +199,60 @@ impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> {
     }
 }
 
+
+impl<'a, 'tcx: 'a> TyDecoder<'a, 'tcx> for DecodeContext<'a, 'tcx> {
+
+    fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+        self.tcx.expect("missing TyCtxt in DecodeContext")
+    }
+
+    fn peek_byte(&self) -> u8 {
+        self.opaque.data[self.opaque.position()]
+    }
+
+    fn cached_ty_for_shorthand<F>(&mut self,
+                                  shorthand: usize,
+                                  or_insert_with: F)
+                                  -> Result<Ty<'tcx>, Self::Error>
+        where F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>
+    {
+        let tcx = self.tcx();
+
+        let key = ty::CReaderCacheKey {
+            cnum: self.cdata().cnum,
+            pos: shorthand,
+        };
+
+        if let Some(&ty) = tcx.rcache.borrow().get(&key) {
+            return Ok(ty);
+        }
+
+        let ty = or_insert_with(self)?;
+        tcx.rcache.borrow_mut().insert(key, ty);
+        Ok(ty)
+    }
+
+    fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
+        where F: FnOnce(&mut Self) -> R
+    {
+        let new_opaque = opaque::Decoder::new(self.opaque.data, pos);
+        let old_opaque = mem::replace(&mut self.opaque, new_opaque);
+        let old_state = mem::replace(&mut self.lazy_state, LazyState::NoNode);
+        let r = f(self);
+        self.opaque = old_opaque;
+        self.lazy_state = old_state;
+        r
+    }
+
+    fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
+        if cnum == LOCAL_CRATE {
+            self.cdata().cnum
+        } else {
+            self.cdata().cnum_map.borrow()[cnum]
+        }
+    }
+}
+
 impl<'a, 'tcx, T> SpecializedDecoder<Lazy<T>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<Lazy<T>, Self::Error> {
         Ok(Lazy::with_position(self.read_lazy_distance(Lazy::<T>::min_size())?))
@@ -302,73 +347,37 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
 
 impl<'a, 'tcx> SpecializedDecoder<Ty<'tcx>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<Ty<'tcx>, Self::Error> {
-        let tcx = self.tcx();
-
-        // Handle shorthands first, if we have an usize > 0x80.
-        if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
-            let pos = self.read_usize()?;
-            assert!(pos >= SHORTHAND_OFFSET);
-            let key = ty::CReaderCacheKey {
-                cnum: self.cdata().cnum,
-                pos: pos - SHORTHAND_OFFSET,
-            };
-            if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() {
-                return Ok(ty);
-            }
-
-            let ty = self.with_position(key.pos, Ty::decode)?;
-            tcx.rcache.borrow_mut().insert(key, ty);
-            Ok(ty)
-        } else {
-            Ok(tcx.mk_ty(ty::TypeVariants::decode(self)?))
-        }
+        ty_codec::decode_ty(self)
     }
 }
 
-
 impl<'a, 'tcx> SpecializedDecoder<ty::GenericPredicates<'tcx>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<ty::GenericPredicates<'tcx>, Self::Error> {
-        Ok(ty::GenericPredicates {
-            parent: Decodable::decode(self)?,
-            predicates: (0..self.read_usize()?).map(|_| {
-                    // Handle shorthands first, if we have an usize > 0x80.
-                    if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
-                        let pos = self.read_usize()?;
-                        assert!(pos >= SHORTHAND_OFFSET);
-                        let pos = pos - SHORTHAND_OFFSET;
-
-                        self.with_position(pos, ty::Predicate::decode)
-                    } else {
-                        ty::Predicate::decode(self)
-                    }
-                })
-                .collect::<Result<Vec<_>, _>>()?,
-        })
+        ty_codec::decode_predicates(self)
     }
 }
 
 impl<'a, 'tcx> SpecializedDecoder<&'tcx Substs<'tcx>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> {
-        Ok(self.tcx().mk_substs((0..self.read_usize()?).map(|_| Decodable::decode(self)))?)
+        ty_codec::decode_substs(self)
     }
 }
 
 impl<'a, 'tcx> SpecializedDecoder<ty::Region<'tcx>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<ty::Region<'tcx>, Self::Error> {
-        Ok(self.tcx().mk_region(Decodable::decode(self)?))
+        ty_codec::decode_region(self)
     }
 }
 
 impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice<Ty<'tcx>>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice<Ty<'tcx>>, Self::Error> {
-        Ok(self.tcx().mk_type_list((0..self.read_usize()?).map(|_| Decodable::decode(self)))?)
+        ty_codec::decode_ty_slice(self)
     }
 }
 
 impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::AdtDef> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<&'tcx ty::AdtDef, Self::Error> {
-        let def_id = DefId::decode(self)?;
-        Ok(self.tcx().adt_def(def_id))
+        ty_codec::decode_adt_def(self)
     }
 }
 
@@ -376,22 +385,19 @@ impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice<ty::ExistentialPredicate<'tcx>
     for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self)
         -> Result<&'tcx ty::Slice<ty::ExistentialPredicate<'tcx>>, Self::Error> {
-        Ok(self.tcx().mk_existential_predicates((0..self.read_usize()?)
-                                                .map(|_| Decodable::decode(self)))?)
+        ty_codec::decode_existential_predicate_slice(self)
     }
 }
 
 impl<'a, 'tcx> SpecializedDecoder<ByteArray<'tcx>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<ByteArray<'tcx>, Self::Error> {
-        Ok(ByteArray {
-            data: self.tcx().alloc_byte_array(&Vec::decode(self)?)
-        })
+        ty_codec::decode_byte_array(self)
     }
 }
 
 impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Const<'tcx>> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<&'tcx ty::Const<'tcx>, Self::Error> {
-        Ok(self.tcx().mk_const(Decodable::decode(self)?))
+        ty_codec::decode_const(self)
     }
 }
 
diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs
index 6b49be3e12192..bf2c61c13c321 100644
--- a/src/librustc_metadata/encoder.rs
+++ b/src/librustc_metadata/encoder.rs
@@ -25,14 +25,13 @@ use rustc::middle::lang_items;
 use rustc::mir;
 use rustc::traits::specialization_graph;
 use rustc::ty::{self, Ty, TyCtxt, ReprOptions};
+use rustc::ty::codec::{self as ty_codec, TyEncoder};
 
 use rustc::session::config::{self, CrateTypeProcMacro};
 use rustc::util::nodemap::{FxHashMap, NodeSet};
 
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
 
-use std::hash::Hash;
-use std::intrinsics;
 use std::io::prelude::*;
 use std::io::Cursor;
 use std::path::Path;
@@ -119,7 +118,7 @@ impl<'a, 'tcx, T> SpecializedEncoder<LazySeq<T>> for EncodeContext<'a, 'tcx> {
 
 impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
-        self.encode_with_shorthand(ty, &ty.sty, |ecx| &mut ecx.type_shorthands)
+        ty_codec::encode_with_shorthand(self, ty, |ecx| &mut ecx.type_shorthands)
     }
 }
 
@@ -127,20 +126,17 @@ impl<'a, 'tcx> SpecializedEncoder<ty::GenericPredicates<'tcx>> for EncodeContext
     fn specialized_encode(&mut self,
                           predicates: &ty::GenericPredicates<'tcx>)
                           -> Result<(), Self::Error> {
-        predicates.parent.encode(self)?;
-        predicates.predicates.len().encode(self)?;
-        for predicate in &predicates.predicates {
-            self.encode_with_shorthand(predicate, predicate, |ecx| &mut ecx.predicate_shorthands)?
-        }
-        Ok(())
+        ty_codec::encode_predicates(self, predicates, |ecx| &mut ecx.predicate_shorthands)
     }
 }
 
-impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
-
-    pub fn position(&self) -> usize {
+impl<'a, 'tcx> TyEncoder for EncodeContext<'a, 'tcx> {
+    fn position(&self) -> usize {
         self.opaque.position()
     }
+}
+
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
 
     fn emit_node<F: FnOnce(&mut Self, usize) -> R, R>(&mut self, f: F) -> R {
         assert_eq!(self.lazy_state, LazyState::NoNode);
@@ -204,44 +200,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         })
     }
 
-    /// Encode the given value or a previously cached shorthand.
-    fn encode_with_shorthand<T, U, M>(&mut self,
-                                      value: &T,
-                                      variant: &U,
-                                      map: M)
-                                      -> Result<(), <Self as Encoder>::Error>
-        where M: for<'b> Fn(&'b mut Self) -> &'b mut FxHashMap<T, usize>,
-              T: Clone + Eq + Hash,
-              U: Encodable
-    {
-        let existing_shorthand = map(self).get(value).cloned();
-        if let Some(shorthand) = existing_shorthand {
-            return self.emit_usize(shorthand);
-        }
-
-        let start = self.position();
-        variant.encode(self)?;
-        let len = self.position() - start;
-
-        // The shorthand encoding uses the same usize as the
-        // discriminant, with an offset so they can't conflict.
-        let discriminant = unsafe { intrinsics::discriminant_value(variant) };
-        assert!(discriminant < SHORTHAND_OFFSET as u64);
-        let shorthand = start + SHORTHAND_OFFSET;
-
-        // Get the number of bits that leb128 could fit
-        // in the same space as the fully encoded type.
-        let leb128_bits = len * 7;
-
-        // Check that the shorthand is a not longer than the
-        // full encoding itself, i.e. it's an obvious win.
-        if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
-            map(self).insert(value.clone(), shorthand);
-        }
-
-        Ok(())
-    }
-
     // Encodes something that corresponds to a single DepNode::GlobalMetaData
     // and registers the Fingerprint in the `metadata_hashes` map.
     pub fn tracked<'x, DATA, R>(&'x mut self,
diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs
index 54dbb68667b3a..20bdfaea0d0bb 100644
--- a/src/librustc_metadata/lib.rs
+++ b/src/librustc_metadata/lib.rs
@@ -15,7 +15,6 @@
 
 #![feature(box_patterns)]
 #![feature(conservative_impl_trait)]
-#![feature(core_intrinsics)]
 #![feature(i128_type)]
 #![feature(proc_macro_internals)]
 #![feature(quote)]
diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs
index dad0d26d2715d..e1d127d3516b4 100644
--- a/src/librustc_metadata/schema.rs
+++ b/src/librustc_metadata/schema.rs
@@ -53,11 +53,6 @@ pub const METADATA_VERSION: u8 = 4;
 pub const METADATA_HEADER: &'static [u8; 12] =
     &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];
 
-/// The shorthand encoding uses an enum's variant index `usize`
-/// and is offset by this value so it never matches a real variant.
-/// This offset is also chosen so that the first byte is never < 0x80.
-pub const SHORTHAND_OFFSET: usize = 0x80;
-
 /// A value of type T referred to by its absolute position
 /// in the metadata, and which can be decoded lazily.
 ///
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index dd46903bb88d7..ad78c550cf60e 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -17,11 +17,15 @@
 //! within the CodeMap, which upon request can be converted to line and column
 //! information, source code snippets, etc.
 
 pub use syntax_pos::*;
 pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo, NameAndSpan};
 pub use self::ExpnFormat::*;
 
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::stable_hasher::StableHasher;
 use std::cell::{RefCell, Ref};
+use std::hash::Hash;
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
 
@@ -98,6 +102,24 @@ impl FileLoader for RealFileLoader {
     }
 }
 
+// This is a FileMap identifier that is used to correlate FileMaps between
+// subsequent compilation sessions (which is something we need to do during
+// incremental compilation).
+#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+pub struct StableFilemapId(u128);
+
+impl StableFilemapId {
+    pub fn new(filemap: &FileMap) -> StableFilemapId {
+        let mut hasher = StableHasher::new();
+
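+        // Hash only path-related data, so the id stays stable across
+        // compilation sessions (unlike e.g. the filemap's `start_pos`).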
+        filemap.name.hash(&mut hasher);
+        filemap.name_was_remapped.hash(&mut hasher);
+        filemap.unmapped_path.hash(&mut hasher);
+
+        StableFilemapId(hasher.finish())
+    }
+}
+
 // _____________________________________________________________________________
 // CodeMap
 //
@@ -108,6 +130,7 @@ pub struct CodeMap {
     // This is used to apply the file path remapping as specified via
     // -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
     path_mapping: FilePathMapping,
+    stable_id_to_filemap: RefCell<FxHashMap<StableFilemapId, Rc<FileMap>>>,
 }
 
 impl CodeMap {
@@ -116,6 +139,7 @@ impl CodeMap {
             files: RefCell::new(Vec::new()),
             file_loader: Box::new(RealFileLoader),
             path_mapping,
+            stable_id_to_filemap: RefCell::new(FxHashMap()),
         }
     }
 
@@ -126,6 +150,7 @@ impl CodeMap {
             files: RefCell::new(Vec::new()),
             file_loader,
             path_mapping,
+            stable_id_to_filemap: RefCell::new(FxHashMap()),
         }
     }
 
@@ -146,6 +171,10 @@ impl CodeMap {
         self.files.borrow()
     }
 
+    pub fn filemap_by_stable_id(&self, stable_id: StableFilemapId) -> Option<Rc<FileMap>> {
+        self.stable_id_to_filemap.borrow().get(&stable_id).map(|fm| fm.clone())
+    }
+
     fn next_start_pos(&self) -> usize {
         let files = self.files.borrow();
         match files.last() {
@@ -180,6 +209,10 @@ impl CodeMap {
 
         files.push(filemap.clone());
 
+        self.stable_id_to_filemap
+            .borrow_mut()
+            .insert(StableFilemapId::new(&filemap), filemap.clone());
+
         filemap
     }
 
@@ -241,6 +274,10 @@ impl CodeMap {
 
         files.push(filemap.clone());
 
+        self.stable_id_to_filemap
+            .borrow_mut()
+            .insert(StableFilemapId::new(&filemap), filemap.clone());
+
         filemap
     }
 
diff --git a/src/test/incremental/warnings-reemitted.rs b/src/test/incremental/warnings-reemitted.rs
new file mode 100644
index 0000000000000..bf66ac7829c2e
--- /dev/null
+++ b/src/test/incremental/warnings-reemitted.rs
@@ -0,0 +1,19 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions: cfail1 cfail2 cfail3
+// compile-flags: -Coverflow-checks=on
+// must-compile-successfully
+
+#![allow(warnings)]
+
+fn main() {
+    255u8 + 1; //~ WARNING this expression will panic at run-time
+}