diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index a7be6aec..d0641a84 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -18,7 +18,7 @@ jobs:
           - nightly
           - beta
           - stable
-          - 1.63.0
+          - 1.66.0
         features:
           -
           - --features dummy_match_byte
diff --git a/Cargo.toml b/Cargo.toml
index 6604f20e..90e0b573 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,7 +10,7 @@ readme = "README.md"
 keywords = ["css", "syntax", "parser"]
 license = "MPL-2.0"
 edition = "2018"
-rust-version = "1.63"
+rust-version = "1.66"
 
 exclude = ["src/css-parsing-tests/**", "src/big-data-url.css"]
 
@@ -23,7 +23,7 @@ encoding_rs = "0.8"
 cssparser-macros = { path = "./macros", version = "0.6.1" }
 dtoa-short = "0.3"
 itoa = "1.0"
-phf = { version = "0.11.2", features = ["macros"] }
+phf = { version = "0.13.1", features = ["macros"] }
 serde = { version = "1.0", features = ["derive"], optional = true }
 malloc_size_of = { version = "0.1", default-features = false, optional = true }
 smallvec = "1.0"
diff --git a/macros/lib.rs b/macros/lib.rs
index 8b40bd4c..dc7b36e3 100644
--- a/macros/lib.rs
+++ b/macros/lib.rs
@@ -162,7 +162,7 @@ pub fn match_byte(input: TokenStream) -> TokenStream {
     for (i, ref arm) in arms.iter().enumerate() {
         let case_id = i + 1;
         let index = case_id as isize;
-        let name = syn::Ident::new(&format!("Case{}", case_id), arm.span());
+        let name = syn::Ident::new(&format!("Case{case_id}"), arm.span());
         let pat = &arm.pat;
         parse_pat_to_table(pat, case_id as u8, &mut wildcard, &mut table);
 
@@ -177,7 +177,7 @@ pub fn match_byte(input: TokenStream) -> TokenStream {
 
     let mut table_content = Vec::new();
     for entry in table.iter() {
-        let name: syn::Path = syn::parse_str(&format!("Case::Case{}", entry)).unwrap();
+        let name: syn::Path = syn::parse_str(&format!("Case::Case{entry}")).unwrap();
         table_content.push(name);
     }
     let table = quote::quote!(static __CASES: [Case; 256] = [#(#table_content),*];);
diff --git a/src/lib.rs b/src/lib.rs
index dc44fb74..a7460a43 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -41,7 +41,7 @@ As a consequence, when calling another parsing function, either:
 
 Examples:
 
-```{rust,ignore}
+```rust,ignore
 // 'none' | <image>
 fn parse_background_image(context: &ParserContext, input: &mut Parser)
                                     -> Result<Option<Image>, ()> {
@@ -53,7 +53,7 @@ fn parse_background_image(context: &ParserContext, input: &mut Parser)
 }
 ```
 
-```{rust,ignore}
+```rust,ignore
 // [ <length> | <percentage> ] [ <length> | <percentage> ]?
 fn parse_border_spacing(_context: &ParserContext, input: &mut Parser)
                           -> Result<(LengthOrPercentage, LengthOrPercentage), ()> {
diff --git a/src/parser.rs b/src/parser.rs
index aabeea6a..82ec4b00 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -80,11 +80,11 @@ impl fmt::Display for BasicParseErrorKind<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             BasicParseErrorKind::UnexpectedToken(token) => {
-                write!(f, "unexpected token: {:?}", token)
+                write!(f, "unexpected token: {token:?}")
             }
             BasicParseErrorKind::EndOfInput => write!(f, "unexpected end of input"),
             BasicParseErrorKind::AtRuleInvalid(rule) => {
-                write!(f, "invalid @ rule encountered: '@{}'", rule)
+                write!(f, "invalid @ rule encountered: '@{rule}'")
             }
             BasicParseErrorKind::AtRuleBodyInvalid => write!(f, "invalid @ rule body encountered"),
             BasicParseErrorKind::QualifiedRuleInvalid => {
@@ -295,7 +295,7 @@ impl BlockType {
 ///
 /// The union of two sets can be obtained with the `|` operator. Example:
 ///
-/// ```{rust,ignore}
+/// ```rust,ignore
 /// input.parse_until_before(Delimiter::CurlyBracketBlock | Delimiter::Semicolon)
 /// ```
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
diff --git a/src/serializer.rs b/src/serializer.rs
index 6696a622..ed325fc9 100644
--- a/src/serializer.rs
+++ b/src/serializer.rs
@@ -286,7 +286,7 @@ where
 ///
 /// Typical usage:
 ///
-/// ```{rust,ignore}
+/// ```rust,ignore
 /// fn write_foo<W>(foo: &Foo, dest: &mut W) -> fmt::Result where W: fmt::Write {
 ///     dest.write_str("\"")?;
 ///     {
diff --git a/src/tests.rs b/src/tests.rs
index 3c122f0d..d7b4833e 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -794,7 +794,7 @@ fn delimiter_from_byte(b: &mut Bencher) {
 }
 
 #[cfg(feature = "bench")]
-const BACKGROUND_IMAGE: &'static str = include_str!("big-data-url.css");
+const BACKGROUND_IMAGE: &str = include_str!("big-data-url.css");
 
 #[cfg(feature = "bench")]
 #[bench]
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 1c85e10a..1e2bb737 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -229,7 +229,7 @@ enum SeenStatus {
 
 impl<'a> Tokenizer<'a> {
     #[inline]
-    pub fn new(input: &str) -> Tokenizer {
+    pub fn new(input: &'a str) -> Self {
         Tokenizer {
             input,
             position: 0,