
Commit fb2446a

Committed Oct 30, 2018
Auto merge of #55190 - dlavati:51574_rename_codemap_filemap, r=petrochenkov
Rename other occurrences of (Code/File)Map to Source(Map/File) (#51574). Additional renamings for #51574.
2 parents (d586d5d + 6c9f6a1), merge commit fb2446a

File tree: 13 files changed (+210, -209 lines)
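The change is purely mechanical: types and identifiers that used the old CodeMap/FileMap terminology now use SourceMap/SourceFile, and the conventional short names move from `cm`/`fm`/`cmap` to `sm`/`sf`/`smap`. As an orientation aid, here is a minimal, self-contained sketch of the naming convention after the rename; the types below are simplified stand-ins for illustration only, not the actual rustc definitions.

use std::rc::Rc;

// Simplified stand-ins: SourceFile was previously called FileMap,
// and the owning collection field was `file_maps` rather than `source_files`.
struct SourceFile { name: String, src: String }
struct SourceMap { source_files: Vec<Rc<SourceFile>> }

impl SourceMap {
    fn new_source_file(&mut self, name: &str, src: &str) -> Rc<SourceFile> {
        // Local short names follow the new convention: `sf` instead of `fm`.
        let sf = Rc::new(SourceFile { name: name.to_string(), src: src.to_string() });
        self.source_files.push(sf.clone());
        sf
    }
}

fn main() {
    // `sm` instead of `cm` for the map itself.
    let mut sm = SourceMap { source_files: Vec::new() };
    let sf = sm.new_source_file("zebra.rs", "fn main() {}");
    println!("{} file(s); first is {} ({} bytes)", sm.source_files.len(), sf.name, sf.src.len());
}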
 

‎src/librustc/ich/mod.rs

Lines changed: 2 additions & 2 deletions
@@ -11,10 +11,10 @@
 //! ICH - Incremental Compilation Hash
 
 crate use rustc_data_structures::fingerprint::Fingerprint;
-pub use self::caching_codemap_view::CachingSourceMapView;
+pub use self::caching_source_map_view::CachingSourceMapView;
 pub use self::hcx::{StableHashingContextProvider, StableHashingContext, NodeIdHashingMode,
                     hash_stable_trait_impls};
-mod caching_codemap_view;
+mod caching_source_map_view;
 mod hcx;
 
 mod impls_cstore;

‎src/librustc/ty/query/on_disk_cache.rs

Lines changed: 5 additions & 5 deletions
@@ -25,7 +25,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,
 use session::{CrateDisambiguator, Session};
 use std::mem;
 use syntax::ast::NodeId;
-use syntax::source_map::{SourceMap, StableFilemapId};
+use syntax::source_map::{SourceMap, StableSourceFileId};
 use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile};
 use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo};
 use ty;
@@ -62,7 +62,7 @@ pub struct OnDiskCache<'sess> {
     cnum_map: Once<IndexVec<CrateNum, Option<CrateNum>>>,
 
     source_map: &'sess SourceMap,
-    file_index_to_stable_id: FxHashMap<SourceFileIndex, StableFilemapId>,
+    file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
 
     // These two fields caches that are populated lazily during decoding.
     file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
@@ -82,7 +82,7 @@ pub struct OnDiskCache<'sess> {
 // This type is used only for (de-)serialization.
 #[derive(RustcEncodable, RustcDecodable)]
 struct Footer {
-    file_index_to_stable_id: FxHashMap<SourceFileIndex, StableFilemapId>,
+    file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
     prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
     query_result_index: EncodedQueryResultIndex,
     diagnostics_index: EncodedQueryResultIndex,
@@ -181,7 +181,7 @@ impl<'sess> OnDiskCache<'sess> {
             let index = SourceFileIndex(index as u32);
             let file_ptr: *const SourceFile = &**file as *const _;
             file_to_file_index.insert(file_ptr, index);
-            file_index_to_stable_id.insert(index, StableFilemapId::new(&file));
+            file_index_to_stable_id.insert(index, StableSourceFileId::new(&file));
         }
 
         (file_to_file_index, file_index_to_stable_id)
@@ -473,7 +473,7 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> {
     cnum_map: &'x IndexVec<CrateNum, Option<CrateNum>>,
     synthetic_expansion_infos: &'x Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
     file_index_to_file: &'x Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
-    file_index_to_stable_id: &'x FxHashMap<SourceFileIndex, StableFilemapId>,
+    file_index_to_stable_id: &'x FxHashMap<SourceFileIndex, StableSourceFileId>,
     alloc_decoding_session: AllocDecodingSession<'x>,
 }

‎src/librustc_errors/emitter.rs

Lines changed: 28 additions & 28 deletions
@@ -120,7 +120,7 @@ impl ColorConfig {
 
 pub struct EmitterWriter {
     dst: Destination,
-    cm: Option<Lrc<SourceMapperDyn>>,
+    sm: Option<Lrc<SourceMapperDyn>>,
     short_message: bool,
     teach: bool,
     ui_testing: bool,
@@ -134,28 +134,28 @@ struct FileWithAnnotatedLines {
 
 impl EmitterWriter {
     pub fn stderr(color_config: ColorConfig,
-                  code_map: Option<Lrc<SourceMapperDyn>>,
+                  source_map: Option<Lrc<SourceMapperDyn>>,
                   short_message: bool,
                   teach: bool)
                   -> EmitterWriter {
         let dst = Destination::from_stderr(color_config);
         EmitterWriter {
             dst,
-            cm: code_map,
+            sm: source_map,
             short_message,
             teach,
             ui_testing: false,
         }
     }
 
     pub fn new(dst: Box<dyn Write + Send>,
-               code_map: Option<Lrc<SourceMapperDyn>>,
+               source_map: Option<Lrc<SourceMapperDyn>>,
               short_message: bool,
               teach: bool)
               -> EmitterWriter {
         EmitterWriter {
             dst: Raw(dst),
-            cm: code_map,
+            sm: source_map,
             short_message,
             teach,
             ui_testing: false,
@@ -214,14 +214,14 @@ impl EmitterWriter {
         let mut output = vec![];
         let mut multiline_annotations = vec![];
 
-        if let Some(ref cm) = self.cm {
+        if let Some(ref sm) = self.sm {
             for span_label in msp.span_labels() {
                 if span_label.span.is_dummy() {
                     continue;
                 }
 
-                let lo = cm.lookup_char_pos(span_label.span.lo());
-                let mut hi = cm.lookup_char_pos(span_label.span.hi());
+                let lo = sm.lookup_char_pos(span_label.span.lo());
+                let mut hi = sm.lookup_char_pos(span_label.span.hi());
 
                 // Watch out for "empty spans". If we get a span like 6..6, we
                 // want to just display a `^` at 6, so convert that to
@@ -724,10 +724,10 @@ impl EmitterWriter {
 
     fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize {
         let mut max = 0;
-        if let Some(ref cm) = self.cm {
+        if let Some(ref sm) = self.sm {
             for primary_span in msp.primary_spans() {
                 if !primary_span.is_dummy() {
-                    let hi = cm.lookup_char_pos(primary_span.hi());
+                    let hi = sm.lookup_char_pos(primary_span.hi());
                     if hi.line > max {
                         max = hi.line;
                     }
@@ -736,7 +736,7 @@ impl EmitterWriter {
             if !self.short_message {
                 for span_label in msp.span_labels() {
                     if !span_label.span.is_dummy() {
-                        let hi = cm.lookup_char_pos(span_label.span.hi());
+                        let hi = sm.lookup_char_pos(span_label.span.hi());
                         if hi.line > max {
                             max = hi.line;
                         }
@@ -768,7 +768,7 @@ impl EmitterWriter {
                           always_backtrace: bool) -> bool {
         let mut spans_updated = false;
 
-        if let Some(ref cm) = self.cm {
+        if let Some(ref sm) = self.sm {
             let mut before_after: Vec<(Span, Span)> = vec![];
             let mut new_labels: Vec<(Span, String)> = vec![];
 
@@ -777,7 +777,7 @@ impl EmitterWriter {
                 if sp.is_dummy() {
                     continue;
                 }
-                let call_sp = cm.call_span_if_macro(*sp);
+                let call_sp = sm.call_span_if_macro(*sp);
                 if call_sp != *sp && !always_backtrace {
                     before_after.push((*sp, call_sp));
                 }
@@ -802,7 +802,7 @@ impl EmitterWriter {
                     })));
                 }
                 // Check to make sure we're not in any <*macros>
-                if !cm.span_to_filename(def_site).is_macros() &&
+                if !sm.span_to_filename(def_site).is_macros() &&
                    !trace.macro_decl_name.starts_with("desugaring of ") &&
                    !trace.macro_decl_name.starts_with("#[") ||
                    always_backtrace {
@@ -829,7 +829,7 @@ impl EmitterWriter {
                 if sp_label.span.is_dummy() {
                     continue;
                 }
-                if cm.span_to_filename(sp_label.span.clone()).is_macros() &&
+                if sm.span_to_filename(sp_label.span.clone()).is_macros() &&
                     !always_backtrace
                 {
                     let v = sp_label.span.macro_backtrace();
@@ -1000,10 +1000,10 @@ impl EmitterWriter {
         let mut annotated_files = self.preprocess_annotations(msp);
 
         // Make sure our primary file comes first
-        let (primary_lo, cm) = if let (Some(cm), Some(ref primary_span)) =
-            (self.cm.as_ref(), msp.primary_span().as_ref()) {
+        let (primary_lo, sm) = if let (Some(sm), Some(ref primary_span)) =
+            (self.sm.as_ref(), msp.primary_span().as_ref()) {
             if !primary_span.is_dummy() {
-                (cm.lookup_char_pos(primary_span.lo()), cm)
+                (sm.lookup_char_pos(primary_span.lo()), sm)
             } else {
                 emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
                 return Ok(());
@@ -1021,7 +1021,7 @@ impl EmitterWriter {
         // Print out the annotate source lines that correspond with the error
         for annotated_file in annotated_files {
             // we can't annotate anything if the source is unavailable.
-            if !cm.ensure_source_file_source_present(annotated_file.file.clone()) {
+            if !sm.ensure_source_file_source_present(annotated_file.file.clone()) {
                 continue;
             }
 
@@ -1038,7 +1038,7 @@ impl EmitterWriter {
                 buffer.append(buffer_msg_line_offset,
                               &format!("{}:{}:{}",
                                        loc.file.name,
-                                       cm.doctest_offset_line(loc.line),
+                                       sm.doctest_offset_line(loc.line),
                                        loc.col.0 + 1),
                               Style::LineAndColumn);
                 for _ in 0..max_line_num_len {
@@ -1048,7 +1048,7 @@ impl EmitterWriter {
                 buffer.prepend(0,
                                &format!("{}:{}:{}: ",
                                         loc.file.name,
-                                        cm.doctest_offset_line(loc.line),
+                                        sm.doctest_offset_line(loc.line),
                                         loc.col.0 + 1),
                                Style::LineAndColumn);
             }
@@ -1069,7 +1069,7 @@ impl EmitterWriter {
                 };
                 format!("{}:{}{}",
                         annotated_file.file.name,
-                        cm.doctest_offset_line(first_line.line_index),
+                        sm.doctest_offset_line(first_line.line_index),
                         col)
             } else {
                 annotated_file.file.name.to_string()
@@ -1194,7 +1194,7 @@ impl EmitterWriter {
                        level: &Level,
                        max_line_num_len: usize)
                        -> io::Result<()> {
-        if let Some(ref cm) = self.cm {
+        if let Some(ref sm) = self.sm {
             let mut buffer = StyledBuffer::new();
 
             // Render the suggestion message
@@ -1210,7 +1210,7 @@ impl EmitterWriter {
                           Some(Style::HeaderMsg));
 
             // Render the replacements for each suggestion
-            let suggestions = suggestion.splice_lines(&**cm);
+            let suggestions = suggestion.splice_lines(&**sm);
 
             let mut row_num = 2;
             for &(ref complete, ref parts) in suggestions.iter().take(MAX_SUGGESTIONS) {
@@ -1221,11 +1221,11 @@ impl EmitterWriter {
                     && parts[0].snippet.trim() == complete.trim())
                     && complete.lines().count() == 1;
 
-                let lines = cm.span_to_lines(parts[0].span).unwrap();
+                let lines = sm.span_to_lines(parts[0].span).unwrap();
 
                 assert!(!lines.lines.is_empty());
 
-                let line_start = cm.lookup_char_pos(parts[0].span.lo()).line;
+                let line_start = sm.lookup_char_pos(parts[0].span.lo()).line;
                 draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1);
                 let mut line_pos = 0;
                 let mut lines = complete.lines();
@@ -1250,8 +1250,8 @@ impl EmitterWriter {
                 if show_underline {
                     draw_col_separator(&mut buffer, row_num, max_line_num_len + 1);
                     for part in parts {
-                        let span_start_pos = cm.lookup_char_pos(part.span.lo()).col_display;
-                        let span_end_pos = cm.lookup_char_pos(part.span.hi()).col_display;
+                        let span_start_pos = sm.lookup_char_pos(part.span.lo()).col_display;
+                        let span_end_pos = sm.lookup_char_pos(part.span.hi()).col_display;
 
                         // Do not underline the leading...
                         let start = part.snippet.len()

‎src/librustc_errors/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -129,7 +129,7 @@ pub trait SourceMapper {
     fn span_to_filename(&self, sp: Span) -> FileName;
     fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
     fn call_span_if_macro(&self, sp: Span) -> Span;
-    fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool;
+    fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool;
     fn doctest_offset_line(&self, line: usize) -> usize;
 }
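The only change in this trait is the parameter name in the method signature. Rust arguments are positional, so callers are unaffected, and implementors may bind the argument under any name they like. A small self-contained sketch of that property, using a hypothetical stand-in trait rather than the real SourceMapper:

use std::rc::Rc;

struct SourceFile;

// Hypothetical stand-in trait; only the parameter name differs from the old signature.
trait Mapper {
    fn ensure_source_file_source_present(&self, source_file: Rc<SourceFile>) -> bool;
}

struct DummyMapper;

impl Mapper for DummyMapper {
    // An implementation is free to use its own binding name; call sites are positional.
    fn ensure_source_file_source_present(&self, sf: Rc<SourceFile>) -> bool {
        let _ = sf; // pretend we checked that the file's source text is loaded
        true
    }
}

fn main() {
    let m = DummyMapper;
    assert!(m.ensure_source_file_source_present(Rc::new(SourceFile)));
}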

‎src/libsyntax/json.rs

Lines changed: 9 additions & 9 deletions
@@ -36,19 +36,19 @@ use rustc_serialize::json::{as_json, as_pretty_json};
 pub struct JsonEmitter {
     dst: Box<dyn Write + Send>,
     registry: Option<Registry>,
-    cm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
+    sm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
     pretty: bool,
     ui_testing: bool,
 }
 
 impl JsonEmitter {
     pub fn stderr(registry: Option<Registry>,
-                  code_map: Lrc<SourceMap>,
+                  source_map: Lrc<SourceMap>,
                   pretty: bool) -> JsonEmitter {
         JsonEmitter {
             dst: Box::new(io::stderr()),
             registry,
-            cm: code_map,
+            sm: source_map,
             pretty,
             ui_testing: false,
         }
@@ -62,12 +62,12 @@ impl JsonEmitter {
 
     pub fn new(dst: Box<dyn Write + Send>,
                registry: Option<Registry>,
-               code_map: Lrc<SourceMap>,
+               source_map: Lrc<SourceMap>,
                pretty: bool) -> JsonEmitter {
         JsonEmitter {
             dst,
             registry,
-            cm: code_map,
+            sm: source_map,
             pretty,
             ui_testing: false,
         }
@@ -199,7 +199,7 @@ impl Diagnostic {
         }
         let buf = BufWriter::default();
         let output = buf.clone();
-        EmitterWriter::new(Box::new(buf), Some(je.cm.clone()), false, false)
+        EmitterWriter::new(Box::new(buf), Some(je.sm.clone()), false, false)
             .ui_testing(je.ui_testing).emit(db);
         let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap();
         let output = String::from_utf8(output).unwrap();
@@ -269,8 +269,8 @@ impl DiagnosticSpan {
                       mut backtrace: vec::IntoIter<MacroBacktrace>,
                       je: &JsonEmitter)
                       -> DiagnosticSpan {
-        let start = je.cm.lookup_char_pos(span.lo());
-        let end = je.cm.lookup_char_pos(span.hi());
+        let start = je.sm.lookup_char_pos(span.lo());
+        let end = je.sm.lookup_char_pos(span.hi());
         let backtrace_step = backtrace.next().map(|bt| {
             let call_site =
                 Self::from_span_full(bt.call_site,
@@ -356,7 +356,7 @@ impl DiagnosticSpanLine {
     /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
     /// `span` within the line.
     fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
-        je.cm.span_to_lines(span)
+        je.sm.span_to_lines(span)
             .map(|lines| {
                 let fm = &*lines.file;
                 lines.lines

‎src/libsyntax/parse/lexer/mod.rs

Lines changed: 54 additions & 54 deletions
@@ -256,11 +256,11 @@ impl<'a> StringReader<'a> {
         let end = sess.source_map().lookup_byte_offset(span.hi());
 
         // Make the range zero-length if the span is invalid.
-        if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {
+        if span.lo() > span.hi() || begin.sf.start_pos != end.sf.start_pos {
             span = span.shrink_to_lo();
         }
 
-        let mut sr = StringReader::new_raw_internal(sess, begin.fm, None);
+        let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
 
         // Seek the lexer to the right byte range.
         sr.next_pos = span.lo();
@@ -640,9 +640,9 @@ impl<'a> StringReader<'a> {
 
             // I guess this is the only way to figure out if
             // we're at the beginning of the file...
-            let cmap = SourceMap::new(FilePathMapping::empty());
-            cmap.files.borrow_mut().file_maps.push(self.source_file.clone());
-            let loc = cmap.lookup_char_pos_adj(self.pos);
+            let smap = SourceMap::new(FilePathMapping::empty());
+            smap.files.borrow_mut().source_files.push(self.source_file.clone());
+            let loc = smap.lookup_char_pos_adj(self.pos);
             debug!("Skipping a shebang");
             if loc.line == 1 && loc.col == CharPos(0) {
                 // FIXME: Add shebang "token", return it
@@ -1855,17 +1855,17 @@ mod tests {
     use rustc_data_structures::fx::FxHashSet;
     use rustc_data_structures::sync::Lock;
     use with_globals;
-    fn mk_sess(cm: Lrc<SourceMap>) -> ParseSess {
+    fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
-                                                          Some(cm.clone()),
+                                                          Some(sm.clone()),
                                                           false,
                                                           false);
         ParseSess {
             span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)),
             unstable_features: UnstableFeatures::from_environment(),
             config: CrateConfig::default(),
             included_mod_stack: Lock::new(Vec::new()),
-            code_map: cm,
+            source_map: sm,
             missing_fragment_specifiers: Lock::new(FxHashSet::default()),
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
@@ -1875,20 +1875,20 @@ mod tests {
     }
 
     // open a string reader for the given string
-    fn setup<'a>(cm: &SourceMap,
+    fn setup<'a>(sm: &SourceMap,
                  sess: &'a ParseSess,
                  teststr: String)
                  -> StringReader<'a> {
-        let fm = cm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
-        StringReader::new(sess, fm, None)
+        let sf = sm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
+        StringReader::new(sess, sf, None)
     }
 
     #[test]
     fn t1() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            let mut string_reader = setup(&cm,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            let mut string_reader = setup(&sm,
                                           &sh,
                                           "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                           .to_string());
@@ -1934,89 +1934,89 @@ mod tests {
     #[test]
     fn doublecolonparsing() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a b".to_string()),
                                vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
         })
     }
 
     #[test]
     fn dcparsing_2() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a::b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a::b".to_string()),
                                vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
         })
     }
 
     #[test]
     fn dcparsing_3() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
                                vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
         })
     }
 
     #[test]
     fn dcparsing_4() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
                                vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
         })
     }
 
     #[test]
     fn character_a() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
                        token::Literal(token::Char(Symbol::intern("a")), None));
         })
     }
 
     #[test]
     fn character_space() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
                        token::Literal(token::Char(Symbol::intern(" ")), None));
         })
     }
 
     #[test]
     fn character_escaped() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
                        token::Literal(token::Char(Symbol::intern("\\n")), None));
         })
     }
 
     #[test]
     fn lifetime_name() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
                        token::Lifetime(Ident::from_str("'abc")));
         })
     }
 
     #[test]
     fn raw_string() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
                            .next_token()
                            .tok,
                        token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
@@ -2026,15 +2026,15 @@ mod tests {
     #[test]
     fn literal_suffixes() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
                                token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
                                               Some(Symbol::intern("suffix"))));
                     // with a whitespace separator:
-                    assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
                                token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
                                               None));
                 }}
@@ -2050,13 +2050,13 @@ mod tests {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");
 
-            assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
                        token::Literal(token::Integer(Symbol::intern("2")),
                                       Some(Symbol::intern("us"))));
-            assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
                        token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
                                       Some(Symbol::intern("suffix"))));
-            assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
                        token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
                                       Some(Symbol::intern("suffix"))));
         })
@@ -2072,9 +2072,9 @@ mod tests {
     #[test]
     fn nested_block_comments() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
             match lexer.next_token().tok {
                 token::Comment => {}
                 _ => panic!("expected a comment!"),
@@ -2087,9 +2087,9 @@ mod tests {
     #[test]
     fn crlf_comments() {
         with_globals(|| {
-            let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-            let sh = mk_sess(cm.clone());
-            let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
+            let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+            let sh = mk_sess(sm.clone());
+            let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
             let comment = lexer.next_token();
             assert_eq!(comment.tok, token::Comment);
             assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));

‎src/libsyntax/parse/mod.rs

Lines changed: 4 additions & 4 deletions
@@ -57,7 +57,7 @@ pub struct ParseSess {
     pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>,
     /// Used to determine and report recursive mod inclusions
     included_mod_stack: Lock<Vec<PathBuf>>,
-    code_map: Lrc<SourceMap>,
+    source_map: Lrc<SourceMap>,
     pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
 }
 
@@ -71,7 +71,7 @@ impl ParseSess {
         ParseSess::with_span_handler(handler, cm)
     }
 
-    pub fn with_span_handler(handler: Handler, code_map: Lrc<SourceMap>) -> ParseSess {
+    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> ParseSess {
         ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
@@ -80,14 +80,14 @@ impl ParseSess {
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
             included_mod_stack: Lock::new(vec![]),
-            code_map,
+            source_map,
             non_modrs_mods: Lock::new(vec![]),
             buffered_lints: Lock::new(vec![]),
         }
     }
 
     pub fn source_map(&self) -> &SourceMap {
-        &self.code_map
+        &self.source_map
     }
 
     pub fn buffer_lint<S: Into<MultiSpan>>(&self,
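Here only the private `code_map` field of ParseSess is renamed; the public accessor was already called `source_map()`, so code that reaches the map through the accessor is untouched by the rename. A minimal sketch of that shape, with simplified stand-in types rather than the actual ParseSess:

use std::rc::Rc;

struct SourceMap;

struct Sess {
    source_map: Rc<SourceMap>, // field was previously named `code_map`
}

impl Sess {
    fn with_source_map(source_map: Rc<SourceMap>) -> Sess {
        Sess { source_map }
    }

    // The public accessor keeps the same name before and after the field rename.
    fn source_map(&self) -> &SourceMap {
        &self.source_map
    }
}

fn main() {
    let sess = Sess::with_source_map(Rc::new(SourceMap));
    let _sm: &SourceMap = sess.source_map(); // caller code needs no update
}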

‎src/libsyntax/source_map.rs

Lines changed: 92 additions & 91 deletions
Large diffs are not rendered by default.

‎src/libsyntax/test_snippet.rs

Lines changed: 4 additions & 4 deletions
@@ -50,20 +50,20 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
     with_globals(|| {
         let output = Arc::new(Mutex::new(Vec::new()));
 
-        let code_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        code_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
+        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        source_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned());
 
         let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
         let mut msp = MultiSpan::from_span(primary_span);
         for span_label in span_labels {
             let span = make_span(&file_text, &span_label.start, &span_label.end);
             msp.push_span_label(span, span_label.label.to_string());
             println!("span: {:?} label: {:?}", span, span_label.label);
-            println!("text: {:?}", code_map.span_to_snippet(span));
+            println!("text: {:?}", source_map.span_to_snippet(span));
         }
 
         let emitter = EmitterWriter::new(Box::new(Shared { data: output.clone() }),
-                                         Some(code_map.clone()),
+                                         Some(source_map.clone()),
                                          false,
                                          false);
         let handler = Handler::with_emitter(true, false, Box::new(emitter));

‎src/libsyntax_pos/analyze_source_file.rs

Lines changed: 3 additions & 3 deletions
@@ -36,9 +36,9 @@ pub fn analyze_source_file(
     // it encounters. If that point is already outside the source_file, remove
     // it again.
     if let Some(&last_line_start) = lines.last() {
-        let file_map_end = source_file_start_pos + BytePos::from_usize(src.len());
-        assert!(file_map_end >= last_line_start);
-        if last_line_start == file_map_end {
+        let source_file_end = source_file_start_pos + BytePos::from_usize(src.len());
+        assert!(source_file_end >= last_line_start);
+        if last_line_start == source_file_end {
             lines.pop();
         }
     }

‎src/libsyntax_pos/lib.rs

Lines changed: 5 additions & 5 deletions
@@ -1266,9 +1266,9 @@ pub struct LocWithOpt {
 
 // used to be structural records. Better names, anyone?
 #[derive(Debug)]
-pub struct SourceFileAndLine { pub fm: Lrc<SourceFile>, pub line: usize }
+pub struct SourceFileAndLine { pub sf: Lrc<SourceFile>, pub line: usize }
 #[derive(Debug)]
-pub struct SourceFileAndBytePos { pub fm: Lrc<SourceFile>, pub pos: BytePos }
+pub struct SourceFileAndBytePos { pub sf: Lrc<SourceFile>, pub pos: BytePos }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct LineInfo {
@@ -1303,7 +1303,7 @@ pub struct MacroBacktrace {
 }
 
 // _____________________________________________________________________________
-// SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions
+// SpanLinesError, SpanSnippetError, DistinctSources, MalformedSourceMapPositions
 //
 
 pub type FileLinesResult = Result<FileLines, SpanLinesError>;
@@ -1318,7 +1318,7 @@ pub enum SpanLinesError {
 pub enum SpanSnippetError {
     IllFormedSpan(Span),
     DistinctSources(DistinctSources),
-    MalformedForCodemap(MalformedCodemapPositions),
+    MalformedForSourcemap(MalformedSourceMapPositions),
     SourceNotAvailable { filename: FileName }
 }
 
@@ -1329,7 +1329,7 @@ pub struct DistinctSources {
 }
 
 #[derive(Clone, PartialEq, Eq, Debug)]
-pub struct MalformedCodemapPositions {
+pub struct MalformedSourceMapPositions {
     pub name: FileName,
     pub source_len: usize,
     pub begin_pos: BytePos,

‎src/test/run-pass/imports/import-crate-with-invalid-spans/auxiliary/crate_with_invalid_spans.rs

Lines changed: 3 additions & 3 deletions
@@ -23,8 +23,8 @@ pub fn exported_generic<T>(x: T, y: u32) -> (T, u32) {
     // The AST node for the (1 + y) expression generated by the macro will then
     // take it's `lo` span bound from the `1` literal in the macro-defining file
     // and it's `hi` bound from `y` in this file, which should be lower than the
-    // `lo` and even lower than the lower bound of the FileMap it is supposedly
-    // contained in because the FileMap for this file was allocated earlier than
-    // the FileMap of the macro-defining file.
+    // `lo` and even lower than the lower bound of the SourceFile it is supposedly
+    // contained in because the SourceFile for this file was allocated earlier than
+    // the SourceFile of the macro-defining file.
     return (x, add1!(y));
 }
