Commit 1554f90

Remove &mut BufferQueue usage

Signed-off-by: Taym <[email protected]>
1 parent: 34567e9

5 files changed: +52 additions, -52 deletions
html5ever/src/tokenizer/char_ref/mod.rs (11 additions, 11 deletions)

```diff
@@ -115,7 +115,7 @@ impl CharRefTokenizer {
     pub(super) fn step<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         if self.result.is_some() {
             return Done;
@@ -135,7 +135,7 @@ impl CharRefTokenizer {
     fn do_begin<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         match unwrap_or_return!(tokenizer.peek(input), Stuck) {
             'a'..='z' | 'A'..='Z' | '0'..='9' => {
@@ -156,7 +156,7 @@ impl CharRefTokenizer {
     fn do_octothorpe<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
         match c {
@@ -177,7 +177,7 @@ impl CharRefTokenizer {
     fn do_numeric<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
         base: u32,
     ) -> Status {
         let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
@@ -207,7 +207,7 @@ impl CharRefTokenizer {
     fn do_numeric_semicolon<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         match unwrap_or_return!(tokenizer.peek(input), Stuck) {
             ';' => tokenizer.discard_char(input),
@@ -221,7 +221,7 @@ impl CharRefTokenizer {
     fn unconsume_numeric<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let mut unconsume = StrTendril::from_char('#');
         if let Some(c) = self.hex_marker {
@@ -270,7 +270,7 @@ impl CharRefTokenizer {
     fn do_named<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         // peek + discard skips over newline normalization, therefore making it easier to
         // un-consume
@@ -304,14 +304,14 @@ impl CharRefTokenizer {
         tokenizer.emit_error(msg);
     }
 
-    fn unconsume_name(&mut self, input: &mut BufferQueue) {
+    fn unconsume_name(&mut self, input: &BufferQueue) {
         input.push_front(self.name_buf_opt.take().unwrap());
     }
 
     fn finish_named<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
         end_char: Option<char>,
     ) -> Status {
         match self.name_match {
@@ -395,7 +395,7 @@ impl CharRefTokenizer {
     fn do_bogus_name<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         // peek + discard skips over newline normalization, therefore making it easier to
         // un-consume
@@ -414,7 +414,7 @@ impl CharRefTokenizer {
     pub(super) fn end_of_file<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut Tokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) {
         while self.result.is_none() {
             match self.state {
```
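The char_ref hunks all follow the peek-then-discard idiom called out in the comments above: look at the next raw character, decide, and only consume it once the decision is made, so un-consuming after a failed match is a cheap `push_front`. A simplified stand-in using the `CharQueue` sketch from the introduction; `try_semicolon` is a hypothetical helper, not html5ever code:

```rust
// Decide based on the upcoming character; consume it only on a match.
// Mirrors do_numeric_semicolon: `';' => tokenizer.discard_char(input)`.
fn try_semicolon(queue: &CharQueue) -> bool {
    match queue.peek() {
        Some(';') => {
            queue.next(); // discard the ';' we just matched
            true
        },
        _ => false,
    }
}
```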

html5ever/src/tokenizer/mod.rs (12 additions, 10 deletions)

```diff
@@ -206,7 +206,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     }
 
     /// Feed an input string into the tokenizer.
-    pub fn feed(&mut self, input: &mut BufferQueue) -> TokenizerResult<Sink::Handle> {
+    pub fn feed(&mut self, input: &BufferQueue) -> TokenizerResult<Sink::Handle> {
         if input.is_empty() {
             return TokenizerResult::Done;
         }
@@ -248,7 +248,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     //§ preprocessing-the-input-stream
     // Get the next input character, which might be the character
     // 'c' that we already consumed from the buffers.
-    fn get_preprocessed_char(&mut self, mut c: char, input: &mut BufferQueue) -> Option<char> {
+    fn get_preprocessed_char(&mut self, mut c: char, input: &BufferQueue) -> Option<char> {
         if self.ignore_lf {
             self.ignore_lf = false;
             if c == '\n' {
@@ -283,7 +283,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
 
     //§ tokenization
     // Get the next input character, if one is available.
-    fn get_char(&mut self, input: &mut BufferQueue) -> Option<char> {
+    fn get_char(&mut self, input: &BufferQueue) -> Option<char> {
         if self.reconsume {
             self.reconsume = false;
             Some(self.current_char)
@@ -294,7 +294,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         }
     }
 
-    fn pop_except_from(&mut self, input: &mut BufferQueue, set: SmallCharSet) -> Option<SetResult> {
+    fn pop_except_from(&mut self, input: &BufferQueue, set: SmallCharSet) -> Option<SetResult> {
         // Bail to the slow path for various corner cases.
         // This means that `FromSet` can contain characters not in the set!
         // It shouldn't matter because the fallback `FromSet` case should
@@ -321,7 +321,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     // NB: this doesn't set the current input character.
     fn eat(
         &mut self,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
         pat: &str,
         eq: fn(&u8, &u8) -> bool,
     ) -> Option<bool> {
@@ -336,15 +336,17 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         match input.eat(pat, eq) {
             None if self.at_eof => Some(false),
             None => {
-                self.temp_buf.extend(input);
+                while let Some(data) = input.next() {
+                    self.temp_buf.push_char(data);
+                }
                 None
             },
             Some(matched) => Some(matched),
         }
     }
 
     /// Run the state machine for as long as we can.
-    fn run(&mut self, input: &mut BufferQueue) -> TokenizerResult<Sink::Handle> {
+    fn run(&mut self, input: &BufferQueue) -> TokenizerResult<Sink::Handle> {
         if self.opts.profile {
             loop {
                 let state = self.state;
@@ -567,7 +569,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         }
     }
 
-    fn discard_char(&mut self, input: &mut BufferQueue) {
+    fn discard_char(&mut self, input: &BufferQueue) {
         // peek() deals in un-processed characters (no newline normalization), while get_char()
         // does.
         //
@@ -696,7 +698,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     // Return true if we should be immediately re-invoked
     // (this just simplifies control flow vs. break / continue).
     #[allow(clippy::never_loop)]
-    fn step(&mut self, input: &mut BufferQueue) -> ProcessResult<Sink::Handle> {
+    fn step(&mut self, input: &BufferQueue) -> ProcessResult<Sink::Handle> {
         if self.char_ref_tokenizer.is_some() {
             return self.step_char_ref_tokenizer(input);
         }
@@ -1382,7 +1384,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
         }
     }
 
-    fn step_char_ref_tokenizer(&mut self, input: &mut BufferQueue) -> ProcessResult<Sink::Handle> {
+    fn step_char_ref_tokenizer(&mut self, input: &BufferQueue) -> ProcessResult<Sink::Handle> {
         // FIXME HACK: Take and replace the tokenizer so we don't
         // double-mut-borrow self. This is why it's boxed.
         let mut tok = self.char_ref_tokenizer.take().unwrap();
```
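One hunk above changes more than a parameter type. `self.temp_buf.extend(input)` compiled only while `BufferQueue` implemented `Iterator`, since `Extend::extend` must drive the iterator through an exclusive borrow; with that impl removed (see the next file), the fallback drains the queue by hand through the shared reference. A standalone equivalent under the same `CharQueue` stand-in, with `String::push` in place of `StrTendril::push_char`:

```rust
// Drain whatever remains in the queue into a scratch buffer. Only a shared
// `&CharQueue` is needed, because `next` now takes `&self`.
fn drain_into(temp_buf: &mut String, queue: &CharQueue) {
    while let Some(c) = queue.next() {
        temp_buf.push(c);
    }
}
```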

markup5ever/util/buffer_queue.rs (4 additions, 8 deletions)

```diff
@@ -216,15 +216,11 @@ impl BufferQueue {
 
         Some(true)
     }
-}
-
-impl Iterator for BufferQueue {
-    type Item = char;
 
     /// Get the next character if one is available, removing it from the queue.
    ///
     /// This function manages the buffers, removing them as they become empty.
-    fn next(&mut self) -> Option<char> {
+    pub fn next(&self) -> Option<char> {
         let (result, now_empty) = match self.buffers.borrow_mut().front_mut() {
             None => (None, false),
             Some(buf) => {
@@ -251,7 +247,7 @@ mod test {
 
     #[test]
     fn smoke_test() {
-        let mut bq = BufferQueue::default();
+        let bq = BufferQueue::default();
         assert_eq!(bq.peek(), None);
         assert_eq!(bq.next(), None);
 
@@ -269,7 +265,7 @@ mod test {
 
     #[test]
     fn can_unconsume() {
-        let mut bq = BufferQueue::default();
+        let bq = BufferQueue::default();
         bq.push_back("abc".to_tendril());
         assert_eq!(bq.next(), Some('a'));
 
@@ -297,7 +293,7 @@ mod test {
         // This is not very comprehensive. We rely on the tokenizer
         // integration tests for more thorough testing with many
         // different input buffer splits.
-        let mut bq = BufferQueue::default();
+        let bq = BufferQueue::default();
         bq.push_back("a".to_tendril());
         bq.push_back("bc".to_tendril());
         assert_eq!(bq.eat("abcd", u8::eq_ignore_ascii_case), None);
```
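The test updates show the caller-side payoff: since every queue method now takes `&self`, the bindings no longer need `mut`. A usage sketch in the style of the tests above (inside a test fn, with the module's imports; `to_tendril` presumably comes from tendril's `SliceExt`):

```rust
let bq = BufferQueue::default();  // shared binding, no `mut`
bq.push_back("abc".to_tendril());
assert_eq!(bq.peek(), Some('a'));
assert_eq!(bq.next(), Some('a'));
bq.push_front("a".to_tendril()); // un-consume, as can_unconsume exercises
assert_eq!(bq.next(), Some('a'));
assert_eq!(bq.next(), Some('b'));
```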

xml5ever/src/tokenizer/char_ref/mod.rs (11 additions, 11 deletions)

```diff
@@ -116,7 +116,7 @@ impl CharRefTokenizer {
     pub fn step<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         if self.result.is_some() {
             return Done;
@@ -136,7 +136,7 @@ impl CharRefTokenizer {
     fn do_begin<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         match unwrap_or_return!(tokenizer.peek(input), Stuck) {
             '\t' | '\n' | '\x0C' | ' ' | '<' | '&' => self.finish_none(),
@@ -159,7 +159,7 @@ impl CharRefTokenizer {
     fn do_octothorpe<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
         match c {
@@ -181,7 +181,7 @@ impl CharRefTokenizer {
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
         base: u32,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let c = unwrap_or_return!(tokenizer.peek(input), Stuck);
         match c.to_digit(base) {
@@ -210,7 +210,7 @@ impl CharRefTokenizer {
     fn do_numeric_semicolon<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         match unwrap_or_return!(tokenizer.peek(input), Stuck) {
             ';' => tokenizer.discard_char(input),
@@ -224,7 +224,7 @@ impl CharRefTokenizer {
     fn unconsume_numeric<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let mut unconsume = StrTendril::from_char('#');
         if let Some(c) = self.hex_marker {
@@ -273,7 +273,7 @@ impl CharRefTokenizer {
     fn do_named<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let c = unwrap_or_return!(tokenizer.get_char(input), Stuck);
         self.name_buf_mut().push_char(c);
@@ -307,7 +307,7 @@ impl CharRefTokenizer {
     fn unconsume_name<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) {
         tokenizer.unconsume(input, self.name_buf_opt.take().unwrap());
     }
@@ -316,7 +316,7 @@ impl CharRefTokenizer {
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
         end_char: Option<char>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         match self.name_match {
             None => {
@@ -404,7 +404,7 @@ impl CharRefTokenizer {
     fn do_bogus_name<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) -> Status {
         let c = unwrap_or_return!(tokenizer.get_char(input), Stuck);
         self.name_buf_mut().push_char(c);
@@ -420,7 +420,7 @@ impl CharRefTokenizer {
     pub fn end_of_file<Sink: TokenSink>(
         &mut self,
         tokenizer: &mut XmlTokenizer<Sink>,
-        input: &mut BufferQueue,
+        input: &BufferQueue,
     ) {
         while self.result.is_none() {
             match self.state {
```
