diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 4253dd80a..5a9afdbef 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -229,7 +229,7 @@ impl Token {
         Token::Word(Word {
             value: word.to_string(),
             quote_style,
-            keyword: if quote_style == None {
+            keyword: if quote_style.is_none() {
                 let keyword = ALL_KEYWORDS.binary_search(&word_uppercase.as_str());
                 keyword.map_or(Keyword::NoKeyword, |x| ALL_KEYWORDS_INDEX[x])
             } else {
@@ -354,11 +354,11 @@ impl<'a> Tokenizer<'a> {
                 }
 
                 Token::Whitespace(Whitespace::Tab) => self.col += 4,
-                Token::Word(w) if w.quote_style == None => {
-                    self.col += w.value.chars().count() as u64
-                }
-                Token::Word(w) if w.quote_style != None => {
-                    self.col += w.value.chars().count() as u64 + 2
-                }
+                Token::Word(w) => {
+                    self.col += w.value.chars().count() as u64;
+                    if w.quote_style.is_some() {
+                        self.col += 2
+                    }
+                }
                 Token::Number(s, _) => self.col += s.chars().count() as u64,
                 Token::SingleQuotedString(s) => self.col += s.chars().count() as u64,
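
Not part of the patch: a minimal standalone sketch (the `word_width` helper is hypothetical, not a function in the crate) showing that the merged `Token::Word(w)` arm preserves the old column accounting, since `Option::is_none()`/`is_some()` behave the same as comparing against `None` and a quoted word only adds the two quote characters to the width.

    // Hypothetical helper, for illustration only; not part of src/tokenizer.rs.
    fn word_width(value: &str, quote_style: Option<char>) -> u64 {
        // Same computation as the new single `Token::Word(w)` arm.
        let mut col = value.chars().count() as u64;
        if quote_style.is_some() {
            col += 2; // account for the opening and closing quote characters
        }
        col
    }

    fn main() {
        // Unquoted word: just its character count (old first arm).
        assert_eq!(word_width("foo", None), 3);
        // Quoted word: character count plus the two quotes (old second arm).
        assert_eq!(word_width("foo", Some('"')), 5);
    }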