128 changes: 63 additions & 65 deletions src/librustc_incremental/assert_dep_graph.rs
@@ -77,9 +77,11 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {

// Find annotations supplied by user (if any).
let (if_this_changed, then_this_would_need) = {
let mut visitor = IfThisChanged { tcx: tcx,
if_this_changed: vec![],
then_this_would_need: vec![] };
let mut visitor = IfThisChanged {
tcx: tcx,
if_this_changed: vec![],
then_this_would_need: vec![],
};
visitor.process_attrs(ast::CRATE_NODE_ID, &tcx.map.krate().attrs);
tcx.map.krate().visit_all_items(&mut visitor);
(visitor.if_this_changed, visitor.then_this_would_need)
@@ -89,7 +91,8 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
assert!(tcx.sess.opts.debugging_opts.query_dep_graph,
"cannot use the `#[{}]` or `#[{}]` annotations \
without supplying `-Z query-dep-graph`",
ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED);
ATTR_IF_THIS_CHANGED,
ATTR_THEN_THIS_WOULD_NEED);
}

// Check paths.
@@ -99,7 +102,7 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
type Sources = Vec<(Span, DefId, DepNode<DefId>)>;
type Targets = Vec<(Span, InternedString, ast::NodeId, DepNode<DefId>)>;

struct IfThisChanged<'a, 'tcx:'a> {
struct IfThisChanged<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
if_this_changed: Sources,
then_this_would_need: Targets,
@@ -153,15 +156,11 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> {
}
}
None => {
self.tcx.sess.span_fatal(
attr.span,
&format!("missing DepNode variant"));
self.tcx.sess.span_fatal(attr.span, &format!("missing DepNode variant"));
}
};
self.then_this_would_need.push((attr.span,
dep_node_interned.clone().unwrap(),
node_id,
dep_node));
self.then_this_would_need
.push((attr.span, dep_node_interned.clone().unwrap(), node_id, dep_node));
}
}
}
@@ -175,14 +174,12 @@ impl<'a, 'tcx> Visitor<'tcx> for IfThisChanged<'a, 'tcx> {

fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
if_this_changed: &Sources,
then_this_would_need: &Targets)
{
then_this_would_need: &Targets) {
// Return early here so as not to construct the query, which is not cheap.
if if_this_changed.is_empty() {
for &(target_span, _, _, _) in then_this_would_need {
tcx.sess.span_err(
target_span,
&format!("no #[rustc_if_this_changed] annotation detected"));
tcx.sess.span_err(target_span,
&format!("no #[rustc_if_this_changed] annotation detected"));

}
return;
@@ -192,15 +189,12 @@ fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let dependents = query.transitive_successors(source_dep_node);
for &(target_span, ref target_pass, _, ref target_dep_node) in then_this_would_need {
if !dependents.contains(&target_dep_node) {
tcx.sess.span_err(
target_span,
&format!("no path from `{}` to `{}`",
tcx.item_path_str(source_def_id),
target_pass));
tcx.sess.span_err(target_span,
&format!("no path from `{}` to `{}`",
tcx.item_path_str(source_def_id),
target_pass));
} else {
tcx.sess.span_err(
target_span,
&format!("OK"));
tcx.sess.span_err(target_span, &format!("OK"));
}
}
}
@@ -213,30 +207,31 @@ fn dump_graph(tcx: TyCtxt) {
let nodes = match env::var("RUST_DEP_GRAPH_FILTER") {
Ok(string) => {
// Expect one of: "-> target", "source -> target", or "source ->".
let edge_filter = EdgeFilter::new(&string).unwrap_or_else(|e| {
bug!("invalid filter: {}", e)
});
let edge_filter = EdgeFilter::new(&string)
.unwrap_or_else(|e| bug!("invalid filter: {}", e));
let sources = node_set(&query, &edge_filter.source);
let targets = node_set(&query, &edge_filter.target);
filter_nodes(&query, &sources, &targets)
}
Err(_) => {
query.nodes()
.into_iter()
.collect()
.into_iter()
.collect()
}
};
let edges = filter_edges(&query, &nodes);

{ // dump a .txt file with just the edges:
{
// dump a .txt file with just the edges:
let txt_path = format!("{}.txt", path);
let mut file = File::create(&txt_path).unwrap();
for &(ref source, ref target) in &edges {
write!(file, "{:?} -> {:?}\n", source, target).unwrap();
}
}

{ // dump a .dot file in graphviz format:
{
// dump a .dot file in graphviz format:
let dot_path = format!("{}.dot", path);
let mut v = Vec::new();
dot::render(&GraphvizDepGraph(nodes, edges), &mut v).unwrap();
@@ -272,10 +267,14 @@ impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
dot::Id::new("DependencyGraph").unwrap()
}
fn node_id(&self, n: &&'q DepNode<DefId>) -> dot::Id {
let s: String =
format!("{:?}", n).chars()
.map(|c| if c == '_' || c.is_alphanumeric() { c } else { '_' })
.collect();
let s: String = format!("{:?}", n)
.chars()
.map(|c| if c == '_' || c.is_alphanumeric() {
c
} else {
'_'
})
.collect();
Member:
@nrc @ubsan This looks harder to read.

Member:
Yeah, this is probably the trickiest corner of formatting at the moment - I prefer visual indent for method chains, but it makes closures either ugly or subject to extreme rightward drift.
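
As a side note to this thread, a minimal, self-contained sketch of the two styles under discussion; the names (some_long_receiver, compute_hash) are invented for illustration and do not come from this PR:

fn compute_hash(s: &str) -> u64 {
    s.len() as u64 // stand-in for a real hash function
}

fn main() {
    let some_long_receiver = vec!["a", "bb", "ccc"];

    // Visual indent: the chain aligns under the receiver, so any
    // multi-line closure body drifts toward the right margin.
    let hashes: Vec<u64> = some_long_receiver.iter()
                                             .map(|item| compute_hash(item))
                                             .collect();

    // Block indent: the chain steps in by a fixed amount, which
    // leaves a closure body room to breathe.
    let hashes2: Vec<u64> = some_long_receiver
        .iter()
        .map(|item| compute_hash(item))
        .collect();

    assert_eq!(hashes, hashes2);
}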

debug!("n={:?} s={:?}", n, s);
dot::Id::new(s).unwrap()
}
@@ -287,9 +286,9 @@ impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
// Given an optional filter like `"x,y,z"`, returns either `None` (no
// filter) or the set of nodes whose labels contain all of those
// substrings.
fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
-> Option<FnvHashSet<&'q DepNode<DefId>>>
{
fn node_set<'q>(query: &'q DepGraphQuery<DefId>,
filter: &DepNodeFilter)
-> Option<FnvHashSet<&'q DepNode<DefId>>> {
debug!("node_set(filter={:?})", filter);

if filter.accepts_all() {
@@ -302,8 +301,7 @@ fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
sources: &Option<FnvHashSet<&'q DepNode<DefId>>>,
targets: &Option<FnvHashSet<&'q DepNode<DefId>>>)
-> FnvHashSet<&'q DepNode<DefId>>
{
-> FnvHashSet<&'q DepNode<DefId>> {
if let &Some(ref sources) = sources {
if let &Some(ref targets) = targets {
walk_between(query, sources, targets)
@@ -320,11 +318,12 @@ fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
starts: &FnvHashSet<&'q DepNode<DefId>>,
direction: Direction)
-> FnvHashSet<&'q DepNode<DefId>>
{
-> FnvHashSet<&'q DepNode<DefId>> {
let mut set = FnvHashSet();
for &start in starts {
debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING);
debug!("walk_nodes: start={:?} outgoing?={:?}",
start,
direction == OUTGOING);
if set.insert(start) {
let mut stack = vec![query.indices[start]];
while let Some(index) = stack.pop() {
@@ -344,15 +343,19 @@ fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
sources: &FnvHashSet<&'q DepNode<DefId>>,
targets: &FnvHashSet<&'q DepNode<DefId>>)
-> FnvHashSet<&'q DepNode<DefId>>
{
-> FnvHashSet<&'q DepNode<DefId>> {
// This is a bit tricky. We want to include a node only if it is:
// (a) reachable from a source and (b) will reach a target. And we
// have to be careful about cycles etc. Luckily efficiency is not
// a big concern!

#[derive(Copy, Clone, PartialEq)]
enum State { Undecided, Deciding, Included, Excluded }
enum State {
Undecided,
Deciding,
Included,
Excluded,
}

let mut node_states = vec![State::Undecided; query.graph.len_nodes()];

@@ -365,18 +368,14 @@ fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
}

return query.nodes()
.into_iter()
.filter(|&n| {
let index = query.indices[n];
node_states[index.0] == State::Included
})
.collect();

fn recurse(query: &DepGraphQuery<DefId>,
node_states: &mut [State],
node: NodeIndex)
-> bool
{
.into_iter()
.filter(|&n| {
let index = query.indices[n];
node_states[index.0] == State::Included
})
.collect();

fn recurse(query: &DepGraphQuery<DefId>, node_states: &mut [State], node: NodeIndex) -> bool {
match node_states[node.0] {
// known to reach a target
State::Included => return true,
@@ -387,7 +386,7 @@ fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
// backedge, not yet known, say false
State::Deciding => return false,

State::Undecided => { }
State::Undecided => {}
}

node_states[node.0] = State::Deciding;
@@ -411,10 +410,9 @@ fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,

fn filter_edges<'q>(query: &'q DepGraphQuery<DefId>,
nodes: &FnvHashSet<&'q DepNode<DefId>>)
-> Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>
{
-> Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)> {
query.edges()
.into_iter()
.filter(|&(source, target)| nodes.contains(source) && nodes.contains(target))
.collect()
.into_iter()
.filter(|&(source, target)| nodes.contains(source) && nodes.contains(target))
.collect()
}
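The walk_between comment above ("include a node only if it is reachable from a source and will reach a target") can be checked against a much smaller model. A hedged sketch under invented stand-ins: u32 node ids and an edge list instead of DepGraphQuery, and an intersection of a forward walk with a backward walk instead of the three-state DFS used in the diff:

use std::collections::{HashMap, HashSet};

fn reachable(edges: &[(u32, u32)], starts: &HashSet<u32>, forward: bool) -> HashSet<u32> {
    // Build an adjacency list, reversing the edges for the backward walk.
    let mut adj: HashMap<u32, Vec<u32>> = HashMap::new();
    for &(s, t) in edges {
        let (from, to) = if forward { (s, t) } else { (t, s) };
        adj.entry(from).or_default().push(to);
    }
    // Plain depth-first search; the `seen` set also handles cycles.
    let mut seen: HashSet<u32> = starts.clone();
    let mut stack: Vec<u32> = starts.iter().cloned().collect();
    while let Some(n) = stack.pop() {
        for &next in adj.get(&n).into_iter().flatten() {
            if seen.insert(next) {
                stack.push(next);
            }
        }
    }
    seen
}

fn walk_between(edges: &[(u32, u32)],
                sources: &HashSet<u32>,
                targets: &HashSet<u32>)
                -> HashSet<u32> {
    let from_sources = reachable(edges, sources, true);
    let into_targets = reachable(edges, targets, false);
    from_sources.intersection(&into_targets).cloned().collect()
}

fn main() {
    // 0 -> 1 -> 2 -> 3, a dead-end branch 1 -> 4, and a cycle 2 -> 1.
    let edges = [(0, 1), (1, 2), (2, 3), (1, 4), (2, 1)];
    let sources: HashSet<u32> = [0].iter().cloned().collect();
    let targets: HashSet<u32> = [3].iter().cloned().collect();
    // Node 4 is reachable from the source but never reaches the target,
    // so it is excluded; the cycle does not trip up the walk.
    let expected: HashSet<u32> = [0, 1, 2, 3].iter().cloned().collect();
    assert_eq!(walk_between(&edges, &sources, &targets), expected);
}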
src/librustc_incremental/calculate_svh/caching_codemap_view.rs
@@ -68,7 +68,7 @@ impl<'tcx> CachingCodemapView<'tcx> {

// No cache hit ...
let mut oldest = 0;
for index in 1 .. self.line_cache.len() {
for index in 1..self.line_cache.len() {
if self.line_cache[index].time_stamp < self.line_cache[oldest].time_stamp {
oldest = index;
}
13 changes: 7 additions & 6 deletions src/librustc_incremental/calculate_svh/def_path_hash.rs
@@ -21,16 +21,17 @@ impl<'a, 'tcx> DefPathHashes<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
DefPathHashes {
tcx: tcx,
data: DefIdMap()
data: DefIdMap(),
}
}

pub fn hash(&mut self, def_id: DefId) -> u64 {
let tcx = self.tcx;
*self.data.entry(def_id)
.or_insert_with(|| {
let def_path = tcx.def_path(def_id);
def_path.deterministic_hash(tcx)
})
*self.data
.entry(def_id)
.or_insert_with(|| {
let def_path = tcx.def_path(def_id);
def_path.deterministic_hash(tcx)
})
}
}
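The hash method above is a cache wrapped around an expensive def-path computation. A minimal standalone sketch of the same entry(...).or_insert_with(...) memoization pattern, with invented stand-ins (u32 ids and a multiply in place of the real deterministic hash):

use std::collections::HashMap;

struct PathHashes {
    cache: HashMap<u32, u64>,
}

impl PathHashes {
    fn hash(&mut self, id: u32) -> u64 {
        // The closure runs at most once per id; later calls hit the cache.
        *self.cache.entry(id).or_insert_with(|| {
            (id as u64).wrapping_mul(0x9e37_79b9_7f4a_7c15)
        })
    }
}

fn main() {
    let mut hashes = PathHashes { cache: HashMap::new() };
    let first = hashes.hash(7);  // computed
    let second = hashes.hash(7); // cached
    assert_eq!(first, second);
}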
2 changes: 1 addition & 1 deletion src/librustc_incremental/calculate_svh/hasher.rs
@@ -24,7 +24,7 @@ impl IchHasher {
let hash_size = mem::size_of::<Fingerprint>();
IchHasher {
state: ArchIndependentHasher::new(Blake2bHasher::new(hash_size, &[])),
bytes_hashed: 0
bytes_hashed: 0,
}
}

35 changes: 18 additions & 17 deletions src/librustc_incremental/calculate_svh/mod.rs
@@ -112,7 +112,8 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)

tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);

record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash());
record_time(&tcx.sess.perf_stats.svh_time,
|| visitor.compute_crate_hash());
visitor.hashes
}

@@ -146,10 +147,11 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
let bytes_hashed = state.bytes_hashed();
let item_hash = state.finish();
self.hashes.insert(DepNode::Hir(def_id), item_hash);
debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, item_hash);
debug!("calculate_item_hash: def_id={:?} hash={:?}",
def_id,
item_hash);

let bytes_hashed = self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() +
bytes_hashed;
let bytes_hashed = self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + bytes_hashed;
self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
}

@@ -167,18 +169,18 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {
// crate hash.
{
let def_path_hashes = &mut self.def_path_hashes;
let mut item_hashes: Vec<_> =
self.hashes.iter()
.map(|(item_dep_node, &item_hash)| {
// convert from a DepNode<DefId> to a
// DepNode<u64> where the u64 is the
// hash of the def-id's def-path:
let item_dep_node =
item_dep_node.map_def(|&did| Some(def_path_hashes.hash(did)))
.unwrap();
(item_dep_node, item_hash)
})
.collect();
let mut item_hashes: Vec<_> = self.hashes
.iter()
.map(|(item_dep_node, &item_hash)| {
// convert from a DepNode<DefId> to a
// DepNode<u64> where the u64 is the
// hash of the def-id's def-path:
let item_dep_node =
item_dep_node.map_def(|&did| Some(def_path_hashes.hash(did)))
.unwrap();
(item_dep_node, item_hash)
})
.collect();
item_hashes.sort(); // avoid artificial dependencies on item ordering
item_hashes.hash(&mut crate_state);
}
@@ -210,4 +212,3 @@ impl<'a, 'tcx> visit::Visitor<'tcx> for HashItemsVisitor<'a, 'tcx> {
visit::walk_foreign_item(self, item);
}
}

159 changes: 76 additions & 83 deletions src/librustc_incremental/calculate_svh/svh_visitor.rs
@@ -36,15 +36,13 @@ use super::def_path_hash::DefPathHashes;
use super::caching_codemap_view::CachingCodemapView;
use super::hasher::IchHasher;

const IGNORED_ATTRIBUTES: &'static [&'static str] = &[
"cfg",
::ATTR_IF_THIS_CHANGED,
::ATTR_THEN_THIS_WOULD_NEED,
::ATTR_DIRTY,
::ATTR_CLEAN,
::ATTR_DIRTY_METADATA,
::ATTR_CLEAN_METADATA
];
const IGNORED_ATTRIBUTES: &'static [&'static str] = &["cfg",
::ATTR_IF_THIS_CHANGED,
::ATTR_THEN_THIS_WOULD_NEED,
::ATTR_DIRTY,
::ATTR_CLEAN,
::ATTR_DIRTY_METADATA,
::ATTR_CLEAN_METADATA];

pub struct StrictVersionHashVisitor<'a, 'hash: 'a, 'tcx: 'hash> {
pub tcx: TyCtxt<'hash, 'tcx, 'tcx>,
@@ -149,7 +147,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
// and assigns each a distinct tag to feed into the hash computation.
#[derive(Hash)]
enum SawAbiComponent<'a> {

// FIXME (#14132): should we include (some function of)
// ident.ctxt as well?
SawIdent(token::InternedString),
@@ -184,9 +181,7 @@ enum SawAbiComponent<'a> {
SawAssocTypeBinding,
SawAttribute(ast::AttrStyle),
SawMacroDef,
SawSpan(Option<(&'a str, usize, BytePos)>,
Option<(&'a str, usize, BytePos)>,
SawSpanExpnKind),
SawSpan(Option<(&'a str, usize, BytePos)>, Option<(&'a str, usize, BytePos)>, SawSpanExpnKind),
}

/// SawExprComponent carries all of the information that we want
@@ -208,7 +203,6 @@ enum SawAbiComponent<'a> {
/// Ty, TraitItem and ImplItem follow the same methodology.
#[derive(Hash)]
enum SawExprComponent<'a> {

SawExprLoop(Option<token::InternedString>),
SawExprField(token::InternedString),
SawExprTupField(usize),
@@ -243,35 +237,35 @@ enum SawExprComponent<'a> {

fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
match *node {
ExprBox(..) => SawExprBox,
ExprArray(..) => SawExprArray,
ExprCall(..) => SawExprCall,
ExprMethodCall(..) => SawExprMethodCall,
ExprTup(..) => SawExprTup,
ExprBinary(op, ..) => SawExprBinary(op.node),
ExprUnary(op, _) => SawExprUnary(op),
ExprLit(ref lit) => SawExprLit(lit.node.clone()),
ExprCast(..) => SawExprCast,
ExprType(..) => SawExprType,
ExprIf(..) => SawExprIf,
ExprWhile(..) => SawExprWhile,
ExprLoop(_, id) => SawExprLoop(id.map(|id| id.node.as_str())),
ExprMatch(..) => SawExprMatch,
ExprBox(..) => SawExprBox,
ExprArray(..) => SawExprArray,
ExprCall(..) => SawExprCall,
ExprMethodCall(..) => SawExprMethodCall,
ExprTup(..) => SawExprTup,
ExprBinary(op, ..) => SawExprBinary(op.node),
ExprUnary(op, _) => SawExprUnary(op),
ExprLit(ref lit) => SawExprLit(lit.node.clone()),
ExprCast(..) => SawExprCast,
ExprType(..) => SawExprType,
ExprIf(..) => SawExprIf,
ExprWhile(..) => SawExprWhile,
ExprLoop(_, id) => SawExprLoop(id.map(|id| id.node.as_str())),
ExprMatch(..) => SawExprMatch,
ExprClosure(cc, _, _, _) => SawExprClosure(cc),
ExprBlock(..) => SawExprBlock,
ExprAssign(..) => SawExprAssign,
ExprAssignOp(op, ..) => SawExprAssignOp(op.node),
ExprField(_, name) => SawExprField(name.node.as_str()),
ExprTupField(_, id) => SawExprTupField(id.node),
ExprIndex(..) => SawExprIndex,
ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)),
ExprAddrOf(m, _) => SawExprAddrOf(m),
ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())),
ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())),
ExprRet(..) => SawExprRet,
ExprInlineAsm(ref a,..) => SawExprInlineAsm(a),
ExprStruct(..) => SawExprStruct,
ExprRepeat(..) => SawExprRepeat,
ExprBlock(..) => SawExprBlock,
ExprAssign(..) => SawExprAssign,
ExprAssignOp(op, ..) => SawExprAssignOp(op.node),
ExprField(_, name) => SawExprField(name.node.as_str()),
ExprTupField(_, id) => SawExprTupField(id.node),
ExprIndex(..) => SawExprIndex,
ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)),
ExprAddrOf(m, _) => SawExprAddrOf(m),
ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())),
ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())),
ExprRet(..) => SawExprRet,
ExprInlineAsm(ref a, ..) => SawExprInlineAsm(a),
ExprStruct(..) => SawExprStruct,
ExprRepeat(..) => SawExprRepeat,
}
}

@@ -290,15 +284,15 @@ enum SawItemComponent {
SawItemUnion,
SawItemTrait(Unsafety),
SawItemDefaultImpl(Unsafety),
SawItemImpl(Unsafety, ImplPolarity)
SawItemImpl(Unsafety, ImplPolarity),
}

fn saw_item(node: &Item_) -> SawItemComponent {
match *node {
ItemExternCrate(..) => SawItemExternCrate,
ItemUse(..) => SawItemUse,
ItemStatic(_, mutability, _) => SawItemStatic(mutability),
ItemConst(..) =>SawItemConst,
ItemConst(..) => SawItemConst,
ItemFn(_, unsafety, constness, abi, _, _) => SawItemFn(unsafety, constness, abi),
ItemMod(..) => SawItemMod,
ItemForeignMod(..) => SawItemForeignMod,
@@ -308,7 +302,7 @@ fn saw_item(node: &Item_) -> SawItemComponent {
ItemUnion(..) => SawItemUnion,
ItemTrait(unsafety, ..) => SawItemTrait(unsafety),
ItemDefaultImpl(unsafety, _) => SawItemDefaultImpl(unsafety),
ItemImpl(unsafety, implpolarity, ..) => SawItemImpl(unsafety, implpolarity)
ItemImpl(unsafety, implpolarity, ..) => SawItemImpl(unsafety, implpolarity),
}
}

@@ -324,7 +318,7 @@ enum SawPatComponent {
SawPatRef(Mutability),
SawPatLit,
SawPatRange,
SawPatSlice
SawPatSlice,
}

fn saw_pat(node: &PatKind) -> SawPatComponent {
@@ -339,7 +333,7 @@ fn saw_pat(node: &PatKind) -> SawPatComponent {
PatKind::Ref(_, mutability) => SawPatRef(mutability),
PatKind::Lit(..) => SawPatLit,
PatKind::Range(..) => SawPatRange,
PatKind::Slice(..) => SawPatSlice
PatKind::Slice(..) => SawPatSlice,
}
}

@@ -357,49 +351,51 @@ enum SawTyComponent {
SawTyPolyTraitRef,
SawTyImplTrait,
SawTyTypeof,
SawTyInfer
SawTyInfer,
}

fn saw_ty(node: &Ty_) -> SawTyComponent {
match *node {
TySlice(..) => SawTySlice,
TyArray(..) => SawTyArray,
TyPtr(ref mty) => SawTyPtr(mty.mutbl),
TyRptr(_, ref mty) => SawTyRptr(mty.mutbl),
TyBareFn(ref barefnty) => SawTyBareFn(barefnty.unsafety, barefnty.abi),
TyNever => SawTyNever,
TyTup(..) => SawTyTup,
TyPath(..) => SawTyPath,
TyObjectSum(..) => SawTyObjectSum,
TyPolyTraitRef(..) => SawTyPolyTraitRef,
TyImplTrait(..) => SawTyImplTrait,
TyTypeof(..) => SawTyTypeof,
TyInfer => SawTyInfer
TySlice(..) => SawTySlice,
TyArray(..) => SawTyArray,
TyPtr(ref mty) => SawTyPtr(mty.mutbl),
TyRptr(_, ref mty) => SawTyRptr(mty.mutbl),
TyBareFn(ref barefnty) => SawTyBareFn(barefnty.unsafety, barefnty.abi),
TyNever => SawTyNever,
TyTup(..) => SawTyTup,
TyPath(..) => SawTyPath,
TyObjectSum(..) => SawTyObjectSum,
TyPolyTraitRef(..) => SawTyPolyTraitRef,
TyImplTrait(..) => SawTyImplTrait,
TyTypeof(..) => SawTyTypeof,
TyInfer => SawTyInfer,
}
}

#[derive(Hash)]
enum SawTraitOrImplItemComponent {
SawTraitOrImplItemConst,
SawTraitOrImplItemMethod(Unsafety, Constness, Abi),
SawTraitOrImplItemType
SawTraitOrImplItemType,
}

fn saw_trait_item(ti: &TraitItem_) -> SawTraitOrImplItemComponent {
match *ti {
ConstTraitItem(..) => SawTraitOrImplItemConst,
MethodTraitItem(ref sig, _) =>
SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi),
TypeTraitItem(..) => SawTraitOrImplItemType
MethodTraitItem(ref sig, _) => {
SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi)
}
TypeTraitItem(..) => SawTraitOrImplItemType,
}
}

fn saw_impl_item(ii: &ImplItemKind) -> SawTraitOrImplItemComponent {
match *ii {
ImplItemKind::Const(..) => SawTraitOrImplItemConst,
ImplItemKind::Method(ref sig, _) =>
SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi),
ImplItemKind::Type(..) => SawTraitOrImplItemType
ImplItemKind::Method(ref sig, _) => {
SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi)
}
ImplItemKind::Type(..) => SawTraitOrImplItemType,
}
}

@@ -444,10 +440,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has
visit::walk_struct_def(self, s);
}

fn visit_variant(&mut self,
v: &'tcx Variant,
g: &'tcx Generics,
item_id: NodeId) {
fn visit_variant(&mut self, v: &'tcx Variant, g: &'tcx Generics, item_id: NodeId) {
debug!("visit_variant: st={:?}", self.st);
SawVariant.hash(self.st);
hash_attrs!(self, &v.node.attrs);
@@ -527,7 +520,8 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has

fn visit_mod(&mut self, m: &'tcx Mod, _s: Span, n: NodeId) {
debug!("visit_mod: st={:?}", self.st);
SawMod.hash(self.st); visit::walk_mod(self, m, n)
SawMod.hash(self.st);
visit::walk_mod(self, m, n)
}

fn visit_ty(&mut self, t: &'tcx Ty) {
@@ -705,17 +699,17 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
}

if let Some(traits) = self.tcx.trait_map.get(&id) {
debug!("hash_resolve: id={:?} traits={:?} st={:?}", id, traits, self.st);
debug!("hash_resolve: id={:?} traits={:?} st={:?}",
id,
traits,
self.st);
traits.len().hash(self.st);

// The ordering of the candidates is not fixed. So we hash
// the def-ids and then sort them and hash the collection.
let mut candidates: Vec<_> =
traits.iter()
.map(|&TraitCandidate { def_id, import_id: _ }| {
self.compute_def_id_hash(def_id)
})
.collect();
let mut candidates: Vec<_> = traits.iter()
.map(|&TraitCandidate { def_id, import_id: _ }| self.compute_def_id_hash(def_id))
.collect();
candidates.sort();
candidates.hash(self.st);
}
@@ -825,8 +819,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {

for i in indices {
let attr = &attributes[i].node;
if !attr.is_sugared_doc &&
!IGNORED_ATTRIBUTES.contains(&&*attr.value.name()) {
if !attr.is_sugared_doc && !IGNORED_ATTRIBUTES.contains(&&*attr.value.name()) {
SawAttribute(attr.style).hash(self.st);
self.hash_meta_item(&*attr.value);
}
@@ -838,7 +831,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
F: Fn(&T) -> K
{
let mut indices = Vec::with_capacity(items.len());
indices.extend(0 .. items.len());
indices.extend(0..items.len());
indices.sort_by_key(|index| get_key(&items[*index]));
indices
}
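Two of the hunks above (the sorted trait-candidate hashes, and indices_sorted_by) lean on the same idea: when iteration order is nondeterministic, hash a sorted copy so the fingerprint cannot depend on ordering. A small self-contained sketch, using std's DefaultHasher in place of the ICH hasher:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn order_independent_hash(items: &[&str]) -> u64 {
    // Hash each element individually...
    let mut element_hashes: Vec<u64> = items.iter()
        .map(|s| {
            let mut h = DefaultHasher::new();
            s.hash(&mut h);
            h.finish()
        })
        .collect();
    // ...then sort the per-element hashes before feeding them into the
    // final state, removing any dependence on the input order.
    element_hashes.sort();
    let mut state = DefaultHasher::new();
    element_hashes.hash(&mut state);
    state.finish()
}

fn main() {
    assert_eq!(order_independent_hash(&["a", "b", "c"]),
               order_independent_hash(&["c", "a", "b"]));
}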
17 changes: 6 additions & 11 deletions src/librustc_incremental/ich/fingerprint.rs
@@ -23,8 +23,8 @@ impl Fingerprint {

pub fn from_smaller_hash(hash: u64) -> Fingerprint {
let mut result = Fingerprint::zero();
result.0[0] = (hash >> 0) as u8;
result.0[1] = (hash >> 8) as u8;
result.0[0] = (hash >> 0) as u8;
result.0[1] = (hash >> 8) as u8;
result.0[2] = (hash >> 16) as u8;
result.0[3] = (hash >> 24) as u8;
result.0[4] = (hash >> 32) as u8;
@@ -35,14 +35,9 @@ impl Fingerprint {
}

pub fn to_smaller_hash(&self) -> u64 {
((self.0[0] as u64) << 0) |
((self.0[1] as u64) << 8) |
((self.0[2] as u64) << 16) |
((self.0[3] as u64) << 24) |
((self.0[4] as u64) << 32) |
((self.0[5] as u64) << 40) |
((self.0[6] as u64) << 48) |
((self.0[7] as u64) << 56)
((self.0[0] as u64) << 0) | ((self.0[1] as u64) << 8) | ((self.0[2] as u64) << 16) |
((self.0[3] as u64) << 24) | ((self.0[4] as u64) << 32) |
((self.0[5] as u64) << 40) | ((self.0[6] as u64) << 48) | ((self.0[7] as u64) << 56)
}
}

@@ -69,7 +64,7 @@ impl Decodable for Fingerprint {

impl ::std::fmt::Display for Fingerprint {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
for i in 0 .. self.0.len() {
for i in 0..self.0.len() {
if i > 0 {
write!(formatter, "::")?;
}
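from_smaller_hash and to_smaller_hash above are inverse byte-packing operations. A round-trip sketch of the same shifts (modern code would likely reach for u64::to_le_bytes / from_le_bytes, which did not exist in std when this code was written):

fn pack(hash: u64) -> [u8; 8] {
    let mut out = [0u8; 8];
    for i in 0..8 {
        // Byte i holds bits 8*i .. 8*i + 8 of the hash.
        out[i] = (hash >> (8 * i)) as u8;
    }
    out
}

fn unpack(bytes: &[u8; 8]) -> u64 {
    let mut out = 0u64;
    for (i, &b) in bytes.iter().enumerate() {
        out |= (b as u64) << (8 * i);
    }
    out
}

fn main() {
    let h = 0x0123_4567_89ab_cdef_u64;
    assert_eq!(unpack(&pack(h)), h);
    assert_eq!(pack(h), h.to_le_bytes()); // same little-endian layout
}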
9 changes: 6 additions & 3 deletions src/librustc_incremental/lib.rs
@@ -27,12 +27,15 @@
#![feature(core_intrinsics)]

extern crate graphviz;
#[macro_use] extern crate rustc;
#[macro_use]
extern crate rustc;
extern crate rustc_data_structures;
extern crate serialize as rustc_serialize;

#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
#[macro_use]
extern crate log;
#[macro_use]
extern crate syntax;
extern crate syntax_pos;

const ATTR_DIRTY: &'static str = "rustc_dirty";
2 changes: 1 addition & 1 deletion src/librustc_incremental/persist/data.rs
@@ -106,7 +106,7 @@ pub struct SerializedMetadataHashes {
/// is only populated if -Z query-dep-graph is specified. It will be
/// empty otherwise. Importing crates are perfectly happy with just having
/// the DefIndex.
pub index_map: FnvHashMap<DefIndex, DefPathIndex>
pub index_map: FnvHashMap<DefIndex, DefPathIndex>,
}

/// The hash for some metadata that (when saving) will be exported
88 changes: 46 additions & 42 deletions src/librustc_incremental/persist/directory.rs
@@ -26,7 +26,7 @@ use std::collections::HashMap;
#[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
RustcEncodable, RustcDecodable)]
pub struct DefPathIndex {
index: u32
index: u32,
}

#[derive(RustcEncodable, RustcDecodable)]
@@ -49,14 +49,19 @@ pub struct CrateInfo {

impl DefIdDirectory {
pub fn new(krates: Vec<CrateInfo>) -> DefIdDirectory {
DefIdDirectory { paths: vec![], krates: krates }
DefIdDirectory {
paths: vec![],
krates: krates,
}
}

fn max_current_crate(&self, tcx: TyCtxt) -> CrateNum {
tcx.sess.cstore.crates()
.into_iter()
.max()
.unwrap_or(LOCAL_CRATE)
tcx.sess
.cstore
.crates()
.into_iter()
.max()
.unwrap_or(LOCAL_CRATE)
}

/// Returns a string form for `index`; useful for debugging
@@ -72,7 +77,8 @@ impl DefIdDirectory {
pub fn krate_still_valid(&self,
tcx: TyCtxt,
max_current_crate: CrateNum,
krate: CrateNum) -> bool {
krate: CrateNum)
-> bool {
// Check that the crate-number still matches. For now, if it
// doesn't, just return None. We could do better, such as
// finding the new number.
@@ -96,35 +102,33 @@ impl DefIdDirectory {
format!("{}/{}", name, disambiguator)
}

let new_krates: HashMap<_, _> =
once(LOCAL_CRATE)
let new_krates: HashMap<_, _> = once(LOCAL_CRATE)
.chain(tcx.sess.cstore.crates())
.map(|krate| (make_key(&tcx.crate_name(krate),
&tcx.crate_disambiguator(krate)), krate))
.map(|krate| (make_key(&tcx.crate_name(krate), &tcx.crate_disambiguator(krate)), krate))
.collect();

let ids = self.paths.iter()
.map(|path| {
let old_krate_id = path.krate.as_usize();
assert!(old_krate_id < self.krates.len());
let old_crate_info = &self.krates[old_krate_id];
let old_crate_key = make_key(&old_crate_info.name,
&old_crate_info.disambiguator);
if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
tcx.retrace_path(new_crate_key, &path.data)
} else {
debug!("crate {:?} no longer exists", old_crate_key);
None
}
})
.collect();
let ids = self.paths
.iter()
.map(|path| {
let old_krate_id = path.krate.as_usize();
assert!(old_krate_id < self.krates.len());
let old_crate_info = &self.krates[old_krate_id];
let old_crate_key = make_key(&old_crate_info.name, &old_crate_info.disambiguator);
if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
tcx.retrace_path(new_crate_key, &path.data)
} else {
debug!("crate {:?} no longer exists", old_crate_key);
None
}
})
.collect();
RetracedDefIdDirectory { ids: ids }
}
}

#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct RetracedDefIdDirectory {
ids: Vec<Option<DefId>>
ids: Vec<Option<DefId>>,
}

impl RetracedDefIdDirectory {
@@ -137,22 +141,21 @@ impl RetracedDefIdDirectory {
}
}

pub struct DefIdDirectoryBuilder<'a,'tcx:'a> {
pub struct DefIdDirectoryBuilder<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
hash: DefIdMap<DefPathIndex>,
directory: DefIdDirectory,
}

impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
impl<'a, 'tcx> DefIdDirectoryBuilder<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
let mut krates: Vec<_> =
once(LOCAL_CRATE)
let mut krates: Vec<_> = once(LOCAL_CRATE)
.chain(tcx.sess.cstore.crates())
.map(|krate| {
CrateInfo {
krate: krate,
name: tcx.crate_name(krate).to_string(),
disambiguator: tcx.crate_disambiguator(krate).to_string()
disambiguator: tcx.crate_disambiguator(krate).to_string(),
}
})
.collect();
@@ -176,14 +179,15 @@ impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
debug!("DefIdDirectoryBuilder: def_id={:?}", def_id);
let tcx = self.tcx;
let paths = &mut self.directory.paths;
self.hash.entry(def_id)
.or_insert_with(|| {
let def_path = tcx.def_path(def_id);
let index = paths.len() as u32;
paths.push(def_path);
DefPathIndex { index: index }
})
.clone()
self.hash
.entry(def_id)
.or_insert_with(|| {
let def_path = tcx.def_path(def_id);
let index = paths.len() as u32;
paths.push(def_path);
DefPathIndex { index: index }
})
.clone()
}

pub fn lookup_def_path(&self, id: DefPathIndex) -> &DefPath {
@@ -202,7 +206,7 @@ impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
impl Debug for DefIdDirectory {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt.debug_list()
.entries(self.paths.iter().enumerate())
.finish()
.entries(self.paths.iter().enumerate())
.finish()
}
}
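The retrace logic above keys crates by (name, disambiguator) so a crate keeps its identity even when its crate number changes between compilation sessions. A toy version of that mapping, with invented (number, name, disambiguator) tuples in place of CrateInfo and the tcx queries:

use std::collections::HashMap;

fn retrace_crates(old: &[(u32, &str, &str)],
                  new: &[(u32, &str, &str)])
                  -> HashMap<u32, u32> {
    // Index the current session's crates by their stable key.
    let new_by_key: HashMap<(&str, &str), u32> = new.iter()
        .map(|&(num, name, dis)| ((name, dis), num))
        .collect();
    old.iter()
        .filter_map(|&(old_num, name, dis)| {
            // A crate that no longer exists simply drops out of the map.
            new_by_key.get(&(name, dis)).map(|&new_num| (old_num, new_num))
        })
        .collect()
}

fn main() {
    let old = [(1, "serde", "abc"), (2, "log", "def")];
    let new = [(5, "log", "def"), (6, "serde", "abc")];
    let map = retrace_crates(&old, &new);
    assert_eq!(map[&1], 6); // serde was crate 1, is now crate 6
    assert_eq!(map[&2], 5);
}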
84 changes: 39 additions & 45 deletions src/librustc_incremental/persist/dirty_clean.rs
@@ -67,10 +67,9 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}

let _ignore = tcx.dep_graph.in_ignore();
let dirty_inputs: FnvHashSet<DepNode<DefId>> =
dirty_inputs.iter()
.filter_map(|d| retraced.map(d))
.collect();
let dirty_inputs: FnvHashSet<DepNode<DefId>> = dirty_inputs.iter()
.filter_map(|d| retraced.map(d))
.collect();
let query = tcx.dep_graph.query();
debug!("query-nodes: {:?}", query.nodes());
let krate = tcx.map.krate();
@@ -81,14 +80,13 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
});
}

pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
pub struct DirtyCleanVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
query: &'a DepGraphQuery<DefId>,
dirty_inputs: FnvHashSet<DepNode<DefId>>,
}

impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {

fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
for item in attr.meta_item_list().unwrap_or(&[]) {
if item.check_name(LABEL) {
@@ -115,26 +113,26 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
debug!("assert_dirty({:?})", dep_node);

match dep_node {
DepNode::Krate |
DepNode::Hir(_) => {
DepNode::Krate | DepNode::Hir(_) => {
// HIR nodes are inputs, so if we are asserting that the HIR node is
// dirty, we check the dirty input set.
if !self.dirty_inputs.contains(&dep_node) {
let dep_node_str = self.dep_node_str(&dep_node);
self.tcx.sess.span_err(
item.span,
&format!("`{:?}` not found in dirty set, but should be dirty",
dep_node_str));
self.tcx.sess.span_err(item.span,
&format!("`{:?}` not found in dirty set, but should \
be dirty",
dep_node_str));
}
}
_ => {
// Other kinds of nodes would be targets, so check if
// the dep-graph contains the node.
if self.query.contains_node(&dep_node) {
let dep_node_str = self.dep_node_str(&dep_node);
self.tcx.sess.span_err(
item.span,
&format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
self.tcx.sess.span_err(item.span,
&format!("`{:?}` found in dep graph, but should be \
dirty",
dep_node_str));
}
}
}
@@ -144,25 +142,24 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
debug!("assert_clean({:?})", dep_node);

match dep_node {
DepNode::Krate |
DepNode::Hir(_) => {
DepNode::Krate | DepNode::Hir(_) => {
// For HIR nodes, check the inputs.
if self.dirty_inputs.contains(&dep_node) {
let dep_node_str = self.dep_node_str(&dep_node);
self.tcx.sess.span_err(
item.span,
&format!("`{:?}` found in dirty-node set, but should be clean",
dep_node_str));
self.tcx.sess.span_err(item.span,
&format!("`{:?}` found in dirty-node set, but should \
be clean",
dep_node_str));
}
}
_ => {
// Otherwise, check if the dep-node exists.
if !self.query.contains_node(&dep_node) {
let dep_node_str = self.dep_node_str(&dep_node);
self.tcx.sess.span_err(
item.span,
&format!("`{:?}` not found in dep graph, but should be clean",
dep_node_str));
self.tcx.sess.span_err(item.span,
&format!("`{:?}` not found in dep graph, but should \
be clean",
dep_node_str));
}
}
}
@@ -187,13 +184,14 @@ impl<'a, 'tcx> Visitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> {
}

pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
prev_metadata_hashes: &FnvHashMap<DefId, Fingerprint>,
current_metadata_hashes: &FnvHashMap<DefId, Fingerprint>) {
prev_metadata_hashes: &FnvHashMap<DefId, Fingerprint>,
current_metadata_hashes: &FnvHashMap<DefId,
Fingerprint>) {
if !tcx.sess.opts.debugging_opts.query_dep_graph {
return;
}

tcx.dep_graph.with_ignore(||{
tcx.dep_graph.with_ignore(|| {
let krate = tcx.map.krate();
krate.visit_all_items(&mut DirtyCleanMetadataVisitor {
tcx: tcx,
@@ -203,7 +201,7 @@ pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
});
}

pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> {
pub struct DirtyCleanMetadataVisitor<'a, 'tcx: 'a, 'm> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
prev_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>,
current_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>,
@@ -228,7 +226,6 @@ impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
}

impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {

fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) {
let item_path = self.tcx.item_path_str(def_id);
debug!("assert_state({})", item_path);
@@ -237,24 +234,23 @@ impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id];

if should_be_clean && !hashes_are_equal {
self.tcx.sess.span_err(
span,
&format!("Metadata hash of `{}` is dirty, but should be clean",
item_path));
self.tcx.sess.span_err(span,
&format!("Metadata hash of `{}` is dirty, but should be \
clean",
item_path));
}

let should_be_dirty = !should_be_clean;
if should_be_dirty && hashes_are_equal {
self.tcx.sess.span_err(
span,
&format!("Metadata hash of `{}` is clean, but should be dirty",
item_path));
self.tcx.sess.span_err(span,
&format!("Metadata hash of `{}` is clean, but should be \
dirty",
item_path));
}
} else {
self.tcx.sess.span_err(
span,
&format!("Could not find previous metadata hash of `{}`",
item_path));
self.tcx.sess.span_err(span,
&format!("Could not find previous metadata hash of `{}`",
item_path));
}
}
}
@@ -280,9 +276,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool {
}
}

tcx.sess.span_fatal(
attr.span,
&format!("no cfg attribute"));
tcx.sess.span_fatal(attr.span, &format!("no cfg attribute"));
}

fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> InternedString {
14 changes: 6 additions & 8 deletions src/librustc_incremental/persist/file_format.rs
@@ -39,8 +39,7 @@ const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION");

pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
stream.write_all(FILE_MAGIC)?;
stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
(HEADER_FORMAT_VERSION >> 8) as u8])?;
stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8, (HEADER_FORMAT_VERSION >> 8) as u8])?;

let rustc_version = rustc_version();
assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize);
@@ -72,7 +71,7 @@ pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
let mut file_magic = [0u8; 4];
file.read_exact(&mut file_magic)?;
if file_magic != FILE_MAGIC {
return Ok(None)
return Ok(None);
}
}

@@ -85,7 +84,7 @@ pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
((header_format_version[1] as u16) << 8);

if header_format_version != HEADER_FORMAT_VERSION {
return Ok(None)
return Ok(None);
}
}

@@ -112,11 +111,10 @@ pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
fn rustc_version() -> String {
if nightly_options::is_nightly_build() {
if let Some(val) = env::var_os("RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER") {
return val.to_string_lossy().into_owned()
return val.to_string_lossy().into_owned();
}
}

RUSTC_VERSION.expect("Cannot use rustc without explicit version for \
incremental compilation")
.to_string()
RUSTC_VERSION.expect("Cannot use rustc without explicit version for incremental compilation")
.to_string()
}
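write_file_header and read_file above agree on a tiny on-disk header: four magic bytes followed by a little-endian u16 format version, with mismatches reported as "no usable data" (Ok(None)) rather than as errors. A self-contained sketch of that handshake over an in-memory buffer; the constants here are invented, not the compiler's:

use std::io::{self, Read, Write};

const FILE_MAGIC: &[u8; 4] = b"DEMO";
const HEADER_FORMAT_VERSION: u16 = 4;

fn write_header<W: Write>(out: &mut W) -> io::Result<()> {
    out.write_all(FILE_MAGIC)?;
    // Low byte first: the same little-endian layout the reader expects.
    out.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
                    (HEADER_FORMAT_VERSION >> 8) as u8])
}

fn check_header<R: Read>(input: &mut R) -> io::Result<bool> {
    let mut magic = [0u8; 4];
    input.read_exact(&mut magic)?;
    if &magic != FILE_MAGIC {
        return Ok(false); // wrong kind of file: unusable, but not an error
    }
    let mut version = [0u8; 2];
    input.read_exact(&mut version)?;
    let version = (version[0] as u16) | ((version[1] as u16) << 8);
    Ok(version == HEADER_FORMAT_VERSION)
}

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    write_header(&mut buf)?;
    assert!(check_header(&mut buf.as_slice())?);
    Ok(())
}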
345 changes: 172 additions & 173 deletions src/librustc_incremental/persist/fs.rs

Large diffs are not rendered by default.

45 changes: 23 additions & 22 deletions src/librustc_incremental/persist/hash.rs
@@ -44,18 +44,15 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {

pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
match *dep_node {
DepNode::Krate |
DepNode::Hir(_) => true,
DepNode::Krate | DepNode::Hir(_) => true,
DepNode::MetaData(def_id) => !def_id.is_local(),
_ => false,
}
}

pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<Fingerprint> {
match *dep_node {
DepNode::Krate => {
Some(self.incremental_hashes_map[dep_node])
}
DepNode::Krate => Some(self.incremental_hashes_map[dep_node]),

// HIR nodes (which always come from our crate) are an input:
DepNode::Hir(def_id) => {
@@ -76,9 +73,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
// MetaData nodes from *our* crates are an *output*; we
// don't hash them, but we do compute a hash for them and
// save it for others to use.
DepNode::MetaData(def_id) if !def_id.is_local() => {
Some(self.metadata_hash(def_id))
}
DepNode::MetaData(def_id) if !def_id.is_local() => Some(self.metadata_hash(def_id)),

_ => {
// Other kinds of nodes represent computed by-products
@@ -134,44 +129,44 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
// Lock the directory we'll be reading the hashes from.
let lock_file_path = lock_file_path(&session_dir);
let _lock = match flock::Lock::new(&lock_file_path,
false, // don't wait
false, // don't create the lock-file
false, // don't wait
false, // don't create the lock-file
false) { // shared lock
Ok(lock) => lock,
Err(err) => {
debug!("Could not acquire lock on `{}` while trying to \
load metadata hashes: {}",
lock_file_path.display(),
err);
lock_file_path.display(),
err);

// Could not acquire the lock. The directory is probably
// in the process of being deleted. It's OK to just exit
// here. It's the same scenario as if the file had not
// existed in the first place.
return
return;
}
};

let hashes_file_path = metadata_hash_import_path(&session_dir);

match file_format::read_file(&hashes_file_path)
{
match file_format::read_file(&hashes_file_path) {
Ok(Some(data)) => {
match self.load_from_data(cnum, &data, svh) {
Ok(()) => { }
Err(err) => {
bug!("decoding error in dep-graph from `{}`: {}",
&hashes_file_path.display(), err);
&hashes_file_path.display(),
err);
}
}
}
Ok(None) => {
// If the file is not found, that's ok.
}
Err(err) => {
self.tcx.sess.err(
&format!("could not load dep information from `{}`: {}",
hashes_file_path.display(), err));
self.tcx.sess.err(&format!("could not load dep information from `{}`: {}",
hashes_file_path.display(),
err));
}
}
}
@@ -180,7 +175,8 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
fn load_from_data(&mut self,
cnum: CrateNum,
data: &[u8],
expected_svh: Svh) -> Result<(), String> {
expected_svh: Svh)
-> Result<(), String> {
debug!("load_from_data(cnum={})", cnum);

// Load up the hashes for the def-ids from this crate.
@@ -198,11 +194,16 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
// the hashes are stored with just a def-index, which is
// always relative to the old crate; convert that to use
// our internal crate number
let def_id = DefId { krate: cnum, index: serialized_hash.def_index };
let def_id = DefId {
krate: cnum,
index: serialized_hash.def_index,
};

// record the hash for this dep-node
let old = self.item_metadata_hashes.insert(def_id, serialized_hash.hash);
debug!("load_from_data: def_id={:?} hash={}", def_id, serialized_hash.hash);
debug!("load_from_data: def_id={:?} hash={}",
def_id,
serialized_hash.hash);
assert!(old.is_none(), "already have hash for {:?}", def_id);
}
Ok(())
107 changes: 57 additions & 50 deletions src/librustc_incremental/persist/load.rs
@@ -19,7 +19,7 @@ use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
use std::fs;
use std::path::{Path};
use std::path::Path;

use IncrementalHashesMap;
use ich::Fingerprint;
@@ -55,7 +55,7 @@ pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// We successfully allocated a session directory, but there is no
// dep-graph data in it to load (because this is the first
// compilation session with this incr. comp. dir.)
return
return;
}
Err(()) => {
// Something went wrong while trying to allocate the session
@@ -73,23 +73,25 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let dep_graph_path = dep_graph_path(tcx.sess);
let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
Some(p) => p,
None => return // no file
None => return, // no file
};

let work_products_path = work_products_path(tcx.sess);
let work_products_data = match load_data(tcx.sess, &work_products_path) {
Some(p) => p,
None => return // no file
None => return, // no file
};

match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
match decode_dep_graph(tcx,
incremental_hashes_map,
&dep_graph_data,
&work_products_data) {
Ok(dirty_nodes) => dirty_nodes,
Err(err) => {
tcx.sess.warn(
&format!("decoding error in dep-graph from `{}` and `{}`: {}",
dep_graph_path.display(),
work_products_path.display(),
err));
tcx.sess.warn(&format!("decoding error in dep-graph from `{}` and `{}`: {}",
dep_graph_path.display(),
work_products_path.display(),
err));
}
}
}
@@ -102,16 +104,17 @@ fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
// compiler version. Neither is an error.
}
Err(err) => {
sess.err(
&format!("could not load dep-graph from `{}`: {}",
path.display(), err));
sess.err(&format!("could not load dep-graph from `{}`: {}",
path.display(),
err));
}
}

if let Err(err) = delete_all_session_dir_contents(sess) {
sess.err(&format!("could not clear incompatible incremental \
compilation session directory `{}`: {}",
path.display(), err));
path.display(),
err));
}

None
@@ -123,8 +126,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap,
dep_graph_data: &[u8],
work_products_data: &[u8])
-> Result<(), String>
{
-> Result<(), String> {
// Decode the list of work_products
let mut work_product_decoder = Decoder::new(work_products_data, 0);
let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;
@@ -175,13 +177,13 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// the set of symbols that go into a work-product: if any symbols
// have been removed (or added) the hash will be different and
// we'll ignore the work-product then.
let retraced_edges: Vec<_> =
serialized_dep_graph.edges.iter()
.filter_map(|&(ref raw_source_node, ref raw_target_node)| {
retraced.map(raw_target_node)
.map(|target_node| (raw_source_node, target_node))
})
.collect();
let retraced_edges: Vec<_> = serialized_dep_graph.edges
.iter()
.filter_map(|&(ref raw_source_node, ref raw_target_node)| {
retraced.map(raw_target_node)
.map(|target_node| (raw_source_node, target_node))
})
.collect();

// Compute which work-products have an input that has changed or
// been removed. Put the dirty ones into a set.
@@ -215,7 +217,9 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

let source_node = retraced.map(raw_source_node).unwrap();

debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source_node, target_node);
debug!("decode_dep_graph: clean edge: {:?} -> {:?}",
source_node,
target_node);

let _task = dep_graph.in_task(target_node);
dep_graph.read(source_node);
@@ -248,8 +252,8 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let current_hash = hcx.hash(&dep_node).unwrap();
if current_hash == hash.hash {
debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
current_hash);
dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
current_hash);
continue;
}
debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
@@ -279,36 +283,35 @@ fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
delete_dirty_work_product(tcx, swp);
} else {
let all_files_exist =
swp.work_product
.saved_files
.iter()
.all(|&(_, ref file_name)| {
let path = in_incr_comp_dir_sess(tcx.sess, &file_name);
path.exists()
});
let all_files_exist = swp.work_product
.saved_files
.iter()
.all(|&(_, ref file_name)| {
let path = in_incr_comp_dir_sess(tcx.sess, &file_name);
path.exists()
});
if all_files_exist {
debug!("reconcile_work_products: all files for {:?} exist", swp);
tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
} else {
debug!("reconcile_work_products: some file for {:?} does not exist", swp);
debug!("reconcile_work_products: some file for {:?} does not exist",
swp);
delete_dirty_work_product(tcx, swp);
}
}
}
}

fn delete_dirty_work_product(tcx: TyCtxt,
swp: SerializedWorkProduct) {
fn delete_dirty_work_product(tcx: TyCtxt, swp: SerializedWorkProduct) {
debug!("delete_dirty_work_product({:?})", swp);
for &(_, ref file_name) in &swp.work_product.saved_files {
let path = in_incr_comp_dir_sess(tcx.sess, file_name);
match fs::remove_file(&path) {
Ok(()) => { }
Err(err) => {
tcx.sess.warn(
&format!("file-system error deleting outdated file `{}`: {}",
path.display(), err));
tcx.sess.warn(&format!("file-system error deleting outdated file `{}`: {}",
path.display(),
err));
}
}
}
@@ -318,7 +321,7 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
retraced: &RetracedDefIdDirectory,
output: &mut FnvHashMap<DefId, Fingerprint>) {
if !tcx.sess.opts.debugging_opts.query_dep_graph {
return
return;
}

debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
@@ -327,23 +330,27 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,

if !file_path.exists() {
debug!("load_prev_metadata_hashes() - Couldn't find file containing \
hashes at `{}`", file_path.display());
return
hashes at `{}`",
file_path.display());
return;
}

debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
debug!("load_prev_metadata_hashes() - File: {}",
file_path.display());

let data = match file_format::read_file(&file_path) {
Ok(Some(data)) => data,
Ok(None) => {
debug!("load_prev_metadata_hashes() - File produced by incompatible \
compiler version: {}", file_path.display());
return
compiler version: {}",
file_path.display());
return;
}
Err(err) => {
debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
file_path.display(), err);
return
file_path.display(),
err);
return;
}
};

@@ -354,7 +361,8 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,

debug!("load_prev_metadata_hashes() - Mapping DefIds");

assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
assert_eq!(serialized_hashes.index_map.len(),
serialized_hashes.hashes.len());
for serialized_hash in serialized_hashes.hashes {
let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
if let Some(def_id) = retraced.def_id(def_path_index) {
@@ -366,4 +374,3 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
serialized_hashes.index_map.len());
}
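dirty_nodes above recomputes each input's hash and keeps the entries whose saved hash no longer matches. The same shape in miniature, with a HashMap and string keys standing in for the compiler's dep-nodes and fingerprints:

use std::collections::HashMap;

fn dirty_nodes(saved: &HashMap<&'static str, u64>,
               current_hash: impl Fn(&str) -> u64)
               -> Vec<&'static str> {
    saved.iter()
         .filter(|&(node, &hash)| current_hash(node) != hash)
         .map(|(&node, _)| node)
         .collect()
}

fn main() {
    let mut saved = HashMap::new();
    saved.insert("fn_a", 1);
    saved.insert("fn_b", 2);
    // Pretend fn_b's body changed, so its recomputed hash differs.
    let current_hash = |node: &str| if node == "fn_b" { 99 } else { 1 };
    assert_eq!(dirty_nodes(&saved, current_hash), vec!["fn_b"]);
}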

20 changes: 11 additions & 9 deletions src/librustc_incremental/persist/preds.rs
@@ -39,16 +39,18 @@ impl<'q> Predecessors<'q> {

let inputs: FnvHashMap<_, _> = all_nodes.iter()
.enumerate()
.filter(|&(_, node)| match node.data {
DepNode::WorkProduct(_) => true,
DepNode::MetaData(ref def_id) => def_id.is_local(),
.filter(|&(_, node)| {
match node.data {
DepNode::WorkProduct(_) => true,
DepNode::MetaData(ref def_id) => def_id.is_local(),

// if -Z query-dep-graph is passed, save more extended data
// to enable better unit testing
DepNode::TypeckItemBody(_) |
DepNode::TransCrateItem(_) => tcx.sess.opts.debugging_opts.query_dep_graph,
// if -Z query-dep-graph is passed, save more extended data
// to enable better unit testing
DepNode::TypeckItemBody(_) |
DepNode::TransCrateItem(_) => tcx.sess.opts.debugging_opts.query_dep_graph,

_ => false,
_ => false,
}
})
.map(|(node_index, node)| {
dfs.reset(NodeIndex(node_index));
@@ -63,7 +65,7 @@ impl<'q> Predecessors<'q> {
let mut hashes = FnvHashMap();
for input in inputs.values().flat_map(|v| v.iter().cloned()) {
hashes.entry(input)
.or_insert_with(|| hcx.hash(input).unwrap());
.or_insert_with(|| hcx.hash(input).unwrap());
}

Predecessors {
22 changes: 10 additions & 12 deletions src/librustc_incremental/persist/save.rs
@@ -51,22 +51,20 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// IMPORTANT: We are saving the metadata hashes *before* the dep-graph,
// since metadata-encoding might add new entries to the
// DefIdDirectory (which is saved in the dep-graph file).
save_in(sess,
metadata_hash_export_path(sess),
|e| encode_metadata_hashes(tcx,
svh,
&preds,
&mut builder,
&mut current_metadata_hashes,
e));
save_in(sess, metadata_hash_export_path(sess), |e| {
encode_metadata_hashes(tcx,
svh,
&preds,
&mut builder,
&mut current_metadata_hashes,
e)
});
save_in(sess,
dep_graph_path(sess),
|e| encode_dep_graph(&preds, &mut builder, e));

let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
dirty_clean::check_dirty_clean_metadata(tcx,
&*prev_metadata_hashes,
&current_metadata_hashes);
dirty_clean::check_dirty_clean_metadata(tcx, &*prev_metadata_hashes, &current_metadata_hashes);
}

pub fn save_work_products(sess: &Session) {
@@ -198,7 +196,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
// (I initially wrote this with an iterator, but it seemed harder to read.)
let mut serialized_hashes = SerializedMetadataHashes {
hashes: vec![],
index_map: FnvHashMap()
index_map: FnvHashMap(),
};

let mut def_id_hashes = FnvHashMap();
35 changes: 17 additions & 18 deletions src/librustc_incremental/persist/work_product.rs
@@ -31,24 +31,23 @@ pub fn save_trans_partition(sess: &Session,
}
let work_product_id = Arc::new(WorkProductId(cgu_name.to_string()));

let saved_files: Option<Vec<_>> =
files.iter()
.map(|&(kind, ref path)| {
let file_name = format!("cgu-{}.{}", cgu_name, kind.extension());
let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
match link_or_copy(path, &path_in_incr_dir) {
Ok(_) => Some((kind, file_name)),
Err(err) => {
sess.warn(&format!("error copying object file `{}` \
to incremental directory as `{}`: {}",
path.display(),
path_in_incr_dir.display(),
err));
None
}
}
})
.collect();
let saved_files: Option<Vec<_>> = files.iter()
.map(|&(kind, ref path)| {
let file_name = format!("cgu-{}.{}", cgu_name, kind.extension());
let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
match link_or_copy(path, &path_in_incr_dir) {
Ok(_) => Some((kind, file_name)),
Err(err) => {
sess.warn(&format!("error copying object file `{}` to incremental directory \
as `{}`: {}",
path.display(),
path_in_incr_dir.display(),
err));
None
}
}
})
.collect();
let saved_files = match saved_files {
Some(v) => v,
None => return,