@@ -344,6 +344,20 @@ impl<'a> Tokenizer<'a> {
344
344
/// Moves the tokenizer's read cursor forward by `n` bytes.
pub fn advance(&mut self, n: usize) {
    self.position += n
}
345
345
346
346
// Assumes non-EOF
347
+ #[ inline]
348
+ fn next_byte_unchecked ( & self ) -> u8 { self . byte_at ( 0 ) }
349
+
350
+ #[ inline]
351
+ fn byte_at ( & self , offset : usize ) -> u8 {
352
+ self . input . as_bytes ( ) [ self . position + offset]
353
+ }
354
+
355
+ #[ inline]
356
+ fn consume_byte ( & mut self ) -> u8 {
357
+ self . position += 1 ;
358
+ self . input . as_bytes ( ) [ self . position - 1 ]
359
+ }
360
+
347
361
#[ inline]
348
362
fn next_char ( & self ) -> char { self . char_at ( 0 ) }
349
363
@@ -812,11 +826,11 @@ fn consume_numeric<'a>(tokenizer: &mut Tokenizer<'a>) -> Token<'a> {
812
826
813
827
814
828
fn consume_unquoted_url < ' a > ( tokenizer : & mut Tokenizer < ' a > ) -> Result < Token < ' a > , ( ) > {
815
- for ( offset, c) in tokenizer. input [ tokenizer. position ..] . char_indices ( ) {
829
+ for ( offset, c) in tokenizer. input [ tokenizer. position ..] . as_bytes ( ) . iter ( ) . cloned ( ) . enumerate ( ) {
816
830
match c {
817
- ' ' | '\t' | '\n' | '\r' | '\x0C' => { } ,
818
- '"' | '\'' => return Err ( ( ) ) , // Do not advance
819
- ')' => {
831
+ b ' ' | b '\t' | b '\n' | b '\r' | b '\x0C' => { } ,
832
+ b '"' | b '\'' => return Err ( ( ) ) , // Do not advance
833
+ b ')' => {
820
834
tokenizer. advance ( offset + 1 ) ;
821
835
return Ok ( UnquotedUrl ( Borrowed ( "" ) ) ) ;
822
836
}
@@ -836,28 +850,28 @@ fn consume_unquoted_url<'a>(tokenizer: &mut Tokenizer<'a>) -> Result<Token<'a>,
836
850
if tokenizer. is_eof ( ) {
837
851
return UnquotedUrl ( Borrowed ( tokenizer. slice_from ( start_pos) ) )
838
852
}
839
- match tokenizer. next_char ( ) {
840
- ' ' | '\t' | '\n' | '\r' | '\x0C' => {
853
+ match tokenizer. next_byte_unchecked ( ) {
854
+ b ' ' | b '\t' | b '\n' | b '\r' | b '\x0C' => {
841
855
let value = tokenizer. slice_from ( start_pos) ;
842
856
tokenizer. advance ( 1 ) ;
843
857
return consume_url_end ( tokenizer, Borrowed ( value) )
844
858
}
845
- ')' => {
859
+ b ')' => {
846
860
let value = tokenizer. slice_from ( start_pos) ;
847
861
tokenizer. advance ( 1 ) ;
848
862
return UnquotedUrl ( Borrowed ( value) )
849
863
}
850
- '\x01' ...'\x08' | '\x0B' | '\x0E' ...'\x1F' | '\x7F' // non-printable
851
- | '"' | '\'' | '(' => {
864
+ b '\x01' ...b '\x08' | b '\x0B' | b '\x0E' ...b '\x1F' | b '\x7F' // non-printable
865
+ | b '"' | b '\'' | b '(' => {
852
866
tokenizer. advance ( 1 ) ;
853
867
return consume_bad_url ( tokenizer)
854
868
} ,
855
- '\\' | '\0' => {
869
+ b '\\' | b '\0' => {
856
870
string = tokenizer. slice_from ( start_pos) . to_owned ( ) ;
857
871
break
858
872
}
859
873
_ => {
860
- tokenizer. consume_char ( ) ;
874
+ tokenizer. consume_byte ( ) ;
861
875
}
862
876
}
863
877
}
0 commit comments