Skip to content

chore(docs): refine docs #1326

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Jun 30, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
105 changes: 53 additions & 52 deletions src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,11 +71,11 @@ mod recursion {
use super::ParserError;

/// Tracks remaining recursion depth. This value is decremented on
/// each call to `try_decrease()`, when it reaches 0 an error will
/// each call to [`RecursionCounter::try_decrease()`], when it reaches 0 an error will
/// be returned.
///
/// Note: Uses an Rc and Cell in order to satisfy the Rust
/// borrow checker so the automatic DepthGuard decrement a
/// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
/// borrow checker so the automatic [`DepthGuard`] decrement can hold a
/// reference to the counter.
pub(crate) struct RecursionCounter {
remaining_depth: Rc<Cell<usize>>,
Expand All @@ -92,7 +92,7 @@ mod recursion {

/// Decreases the remaining depth by 1.
///
/// Returns `Err` if the remaining depth falls to 0.
/// Returns [`Err`] if the remaining depth falls to 0.
///
/// Returns a [`DepthGuard`] which will add 1 to the
/// remaining depth upon drop.
Expand Down Expand Up @@ -131,7 +131,7 @@ mod recursion {
/// Implementation [`RecursionCounter`] if std is NOT available (and does not
/// guard against stack overflow).
///
/// Has the same API as the std RecursionCounter implementation
/// Has the same API as the std [`RecursionCounter`] implementation
/// but does not actually limit stack depth.
pub(crate) struct RecursionCounter {}

Expand Down Expand Up @@ -270,17 +270,17 @@ enum ParserState {

pub struct Parser<'a> {
tokens: Vec<TokenWithLocation>,
/// The index of the first unprocessed token in `self.tokens`
/// The index of the first unprocessed token in [`Parser::tokens`].
index: usize,
/// The current state of the parser.
state: ParserState,
/// The current dialect to use
/// The current dialect to use.
dialect: &'a dyn Dialect,
/// Additional options that allow you to mix & match behavior
/// otherwise constrained to certain dialects (e.g. trailing
/// commas) and/or format of parse (e.g. unescaping)
/// commas) and/or format of parse (e.g. unescaping).
options: ParserOptions,
/// ensure the stack does not overflow by limiting recursion depth
/// Ensure the stack does not overflow by limiting recursion depth.
recursion_counter: RecursionCounter,
}

Expand Down Expand Up @@ -313,7 +313,6 @@ impl<'a> Parser<'a> {

/// Specify the maximum recursion limit while parsing.
///
///
/// [`Parser`] prevents stack overflows by returning
/// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
/// this depth while processing the query.
Expand All @@ -338,7 +337,6 @@ impl<'a> Parser<'a> {

/// Specify additional parser options
///
///
/// [`Parser`] supports additional options ([`ParserOptions`])
/// that allow you to mix & match behavior otherwise constrained
/// to certain dialects (e.g. trailing commas).
Expand Down Expand Up @@ -824,7 +822,7 @@ impl<'a> Parser<'a> {
})
}

/// Parse a new expression including wildcard & qualified wildcard
/// Parse a new expression including wildcard & qualified wildcard.
pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
let index = self.index;

Expand Down Expand Up @@ -867,13 +865,13 @@ impl<'a> Parser<'a> {
self.parse_expr()
}

/// Parse a new expression
/// Parse a new expression.
pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
let _guard = self.recursion_counter.try_decrease()?;
self.parse_subexpr(0)
}

/// Parse tokens until the precedence changes
/// Parse tokens until the precedence changes.
pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
debug!("parsing expr");
let mut expr = self.parse_prefix()?;
Expand Down Expand Up @@ -908,8 +906,7 @@ impl<'a> Parser<'a> {
Ok(expr)
}

/// Get the precedence of the next token
/// With AND, OR, and XOR
/// Get the precedence of the next token, with AND, OR, and XOR.
pub fn get_next_interval_precedence(&self) -> Result<u8, ParserError> {
let token = self.peek_token();

Expand Down Expand Up @@ -944,7 +941,7 @@ impl<'a> Parser<'a> {
Ok(Statement::ReleaseSavepoint { name })
}

/// Parse an expression prefix
/// Parse an expression prefix.
pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
// allow the dialect to override prefix parsing
if let Some(prefix) = self.dialect.parse_prefix(self) {
Expand Down Expand Up @@ -1456,8 +1453,7 @@ impl<'a> Parser<'a> {
}
}

/// parse a group by expr. a group by expr can be one of group sets, roll up, cube, or simple
/// expr.
/// Parse a group by expr. Group by expr can be one of group sets, roll up, cube, or simple expr.
fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
if self.dialect.supports_group_by_expr() {
if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
Expand All @@ -1484,7 +1480,7 @@ impl<'a> Parser<'a> {
}
}

/// parse a tuple with `(` and `)`.
/// Parse a tuple with `(` and `)`.
/// If `lift_singleton` is true, then a singleton tuple is lifted to a tuple of length 1, otherwise it will fail.
/// If `allow_empty` is true, then an empty tuple is allowed.
fn parse_tuple(
Expand Down Expand Up @@ -1953,13 +1949,11 @@ impl<'a> Parser<'a> {
}
}

/// Parses fulltext expressions [(1)]
/// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
///
/// # Errors
/// This method will raise an error if the column list is empty or contains invalid identifiers,
/// the match expression is not a literal string, or if the search modifier is not valid.
///
/// [(1)]: Expr::MatchAgainst
pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
let columns = self.parse_parenthesized_column_list(Mandatory, false)?;

Expand Down Expand Up @@ -2004,17 +1998,19 @@ impl<'a> Parser<'a> {
})
}

/// Parse an INTERVAL expression.
/// Parse an `INTERVAL` expression.
///
/// Some syntactically valid intervals:
///
/// 1. `INTERVAL '1' DAY`
/// 2. `INTERVAL '1-1' YEAR TO MONTH`
/// 3. `INTERVAL '1' SECOND`
/// 4. `INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)`
/// 5. `INTERVAL '1.1' SECOND (2, 2)`
/// 6. `INTERVAL '1:1' HOUR (5) TO MINUTE (5)`
/// 7. (MySql and BigQuey only):`INTERVAL 1 DAY`
/// ```sql
/// 1. INTERVAL '1' DAY
/// 2. INTERVAL '1-1' YEAR TO MONTH
/// 3. INTERVAL '1' SECOND
/// 4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
/// 5. INTERVAL '1.1' SECOND (2, 2)
/// 6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
/// 7. (MySQL & BigQuery only): INTERVAL 1 DAY
/// ```
///
/// Note that we do not currently attempt to parse the quoted value.
pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
Expand Down Expand Up @@ -2210,15 +2206,15 @@ impl<'a> Parser<'a> {
))
}

/// Parse a field definition in a struct [1] or tuple [2].
/// Parse a field definition in a [struct] or [tuple].
/// Syntax:
///
/// ```sql
/// [field_name] field_type
/// ```
///
/// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
/// [2]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
/// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
/// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
fn parse_struct_field_def(
&mut self,
) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
Expand Down Expand Up @@ -2272,15 +2268,15 @@ impl<'a> Parser<'a> {
Ok(fields)
}

/// DuckDB specific: Parse a duckdb dictionary [1]
/// DuckDB specific: Parse a duckdb [dictionary]
///
/// Syntax:
///
/// ```sql
/// {'field_name': expr1[, ... ]}
/// ```
///
/// [1]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
/// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
self.expect_token(&Token::LBrace)?;

Expand All @@ -2291,13 +2287,15 @@ impl<'a> Parser<'a> {
Ok(Expr::Dictionary(fields))
}

/// Parse a field for a duckdb dictionary [1]
/// Parse a field for a duckdb [dictionary]
///
/// Syntax
///
/// ```sql
/// 'name': expr
/// ```
///
/// [1]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
/// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
let key = self.parse_identifier(false)?;

Expand All @@ -2311,13 +2309,15 @@ impl<'a> Parser<'a> {
})
}

/// Parse clickhouse map [1]
/// Parse clickhouse [map]
///
/// Syntax
///
/// ```sql
/// Map(key_data_type, value_data_type)
/// ```
///
/// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/map
/// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
self.expect_keyword(Keyword::MAP)?;
self.expect_token(&Token::LParen)?;
Expand All @@ -2329,13 +2329,15 @@ impl<'a> Parser<'a> {
Ok((key_data_type, value_data_type))
}

/// Parse clickhouse tuple [1]
/// Parse clickhouse [tuple]
///
/// Syntax
///
/// ```sql
/// Tuple([field_name] field_type, ...)
/// ```
///
/// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
/// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
self.expect_keyword(Keyword::TUPLE)?;
self.expect_token(&Token::LParen)?;
Expand Down Expand Up @@ -2649,7 +2651,7 @@ impl<'a> Parser<'a> {
}
}

/// parse the ESCAPE CHAR portion of LIKE, ILIKE, and SIMILAR TO
/// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
pub fn parse_escape_char(&mut self) -> Result<Option<String>, ParserError> {
if self.parse_keyword(Keyword::ESCAPE) {
Ok(Some(self.parse_literal_string()?))
Expand Down Expand Up @@ -2836,7 +2838,7 @@ impl<'a> Parser<'a> {
})
}

/// Parses the parens following the `[ NOT ] IN` operator
/// Parses the parens following the `[ NOT ] IN` operator.
pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
// BigQuery allows `IN UNNEST(array_expression)`
// https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
Expand Down Expand Up @@ -2873,7 +2875,7 @@ impl<'a> Parser<'a> {
Ok(in_op)
}

/// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed
/// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
// Stop parsing subexpressions for <low> and <high> on tokens with
// precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
Expand All @@ -2888,7 +2890,7 @@ impl<'a> Parser<'a> {
})
}

/// Parse a postgresql casting style which is in the form of `expr::datatype`
/// Parse a postgresql casting style which is in the form of `expr::datatype`.
pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
Ok(Expr::Cast {
kind: CastKind::DoubleColon,
Expand All @@ -2898,7 +2900,7 @@ impl<'a> Parser<'a> {
})
}

// use https://www.postgresql.org/docs/7.0/operators.htm#AEN2026 as a reference
// Use https://www.postgresql.org/docs/7.0/operators.htm#AEN2026 as a reference
// higher number = higher precedence
//
// NOTE: The pg documentation is incomplete, e.g. the AT TIME ZONE operator
Expand Down Expand Up @@ -3217,7 +3219,7 @@ impl<'a> Parser<'a> {

/// If the current token is one of the given `keywords`, consume the token
/// and return the keyword that matches. Otherwise, no tokens are consumed
/// and returns `None`.
/// and returns [`None`].
#[must_use]
pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
match self.peek_token().token {
Expand Down Expand Up @@ -3393,8 +3395,7 @@ impl<'a> Parser<'a> {
self.parse_comma_separated(f)
}

/// Run a parser method `f`, reverting back to the current position
/// if unsuccessful.
/// Run a parser method `f`, reverting back to the current position if unsuccessful.
#[must_use]
fn maybe_parse<T, F>(&mut self, mut f: F) -> Option<T>
where
Expand All @@ -3409,8 +3410,8 @@ impl<'a> Parser<'a> {
}
}

/// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns `None` if `ALL` is parsed
/// and results in a `ParserError` if both `ALL` and `DISTINCT` are found.
/// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
/// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
let loc = self.peek_token().location;
let all = self.parse_keyword(Keyword::ALL);
Expand Down
Loading