fix: Fix clippy attribute completions always inserting clippy:: #10562

Merged · 3 commits · Oct 17, 2021
54 changes: 31 additions & 23 deletions crates/ide_completion/src/completions/attribute.rs
@@ -5,9 +5,10 @@

use hir::HasAttrs;
use ide_db::helpers::generated_lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES};
use itertools::Itertools;
use once_cell::sync::Lazy;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, NodeOrToken, SyntaxKind, T};
use rustc_hash::FxHashMap;
use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, SyntaxKind, T};

use crate::{
context::CompletionContext,
@@ -303,31 +304,38 @@ const ATTRIBUTES: &[AttrCompletion] = &[
.prefer_inner(),
];

fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Option<FxHashSet<String>> {
let (l_paren, r_paren) = derive_input.l_paren_token().zip(derive_input.r_paren_token())?;
let mut input_derives = FxHashSet::default();
let mut tokens = derive_input
fn parse_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
let r_paren = input.r_paren_token()?;
let tokens = input
.syntax()
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.skip_while(|token| token != &l_paren)
.skip(1)
.take_while(|token| token != &r_paren)
.peekable();
let mut input = String::new();
while tokens.peek().is_some() {
for token in tokens.by_ref().take_while(|t| t.kind() != T![,]) {
input.push_str(token.text());
}

if !input.is_empty() {
input_derives.insert(input.trim().to_owned());
}

input.clear();
}
.take_while(|it| it.as_token() != Some(&r_paren));
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
Some(
input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
.filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
.collect::<Vec<ast::Path>>(),
)
}

Some(input_derives)
fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
let r_paren = input.r_paren_token()?;
let tokens = input
.syntax()
.children_with_tokens()
.skip(1)
.take_while(|it| it.as_token() != Some(&r_paren));
let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
Some(
input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then(|| group))
.filter_map(|mut tokens| ast::Expr::parse(&tokens.join("")).ok())
.collect::<Vec<ast::Expr>>(),
)
}

#[test]
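The replacement helpers `parse_comma_sep_paths`/`parse_comma_sep_expr` lean on itertools' `group_by` to split the token stream between the parentheses at every `,`, then re-parse each group (`ast::Path::parse` / `ast::Expr::parse`). A minimal, self-contained sketch of that grouping step; the `tokens` slice here is an invented stand-in for the real syntax tokens:

```rust
use itertools::Itertools;

fn main() {
    // Stand-in for the tokens between `(` and `)` in `#[derive(serde::Serialize, Clone)]`.
    let tokens = ["serde", "::", "Serialize", ",", "Clone"];

    // Group consecutive tokens by "is this a comma?"; separator groups are dropped
    // and each remaining group is glued back together into one item.
    let groups = tokens.iter().copied().group_by(|tok| *tok == ",");
    let items: Vec<String> = groups
        .into_iter()
        .filter(|(is_sep, _)| !*is_sep)
        .map(|(_, group)| group.collect::<String>())
        .collect();

    assert_eq!(items, ["serde::Serialize", "Clone"]);
}
```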
31 changes: 18 additions & 13 deletions crates/ide_completion/src/completions/attribute/derive.rs
@@ -2,7 +2,7 @@
use hir::HasAttrs;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use syntax::ast;
use syntax::{ast, SmolStr};

use crate::{
context::CompletionContext,
@@ -15,26 +15,31 @@ pub(super) fn complete_derive(
ctx: &CompletionContext,
derive_input: ast::TokenTree,
) {
if let Some(existing_derives) = super::parse_comma_sep_input(derive_input) {
if let Some(existing_derives) = super::parse_comma_sep_paths(derive_input) {
for (derive, docs) in get_derive_names_in_scope(ctx) {
let label;
let (label, lookup) = if let Some(derive_completion) = DEFAULT_DERIVE_COMPLETIONS
.iter()
.find(|derive_completion| derive_completion.label == derive)
{
let mut components = vec![derive_completion.label];
components.extend(
derive_completion
.dependencies
components.extend(derive_completion.dependencies.iter().filter(|&&dependency| {
!existing_derives
.iter()
.filter(|&&dependency| !existing_derives.contains(dependency)),
);
.filter_map(|it| it.as_single_name_ref())
.any(|it| it.text() == dependency)
}));
let lookup = components.join(", ");
let label = components.iter().rev().join(", ");
(label, Some(lookup))
} else if existing_derives.contains(&derive) {
label = components.iter().rev().join(", ");
(&*label, Some(lookup))
} else if existing_derives
.iter()
.filter_map(|it| it.as_single_name_ref())
.any(|it| it.text().as_str() == derive)
{
continue;
} else {
(derive, None)
(&*derive, None)
};
let mut item =
CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
@@ -52,12 +57,12 @@ pub(super) fn complete_derive(

fn get_derive_names_in_scope(
ctx: &CompletionContext,
) -> FxHashMap<String, Option<hir::Documentation>> {
) -> FxHashMap<SmolStr, Option<hir::Documentation>> {
let mut result = FxHashMap::default();
ctx.process_all_names(&mut |name, scope_def| {
if let hir::ScopeDef::MacroDef(mac) = scope_def {
if mac.kind() == hir::MacroKind::Derive {
result.insert(name.to_string(), mac.docs(ctx.db));
result.insert(name.to_smol_str(), mac.docs(ctx.db));
}
}
});
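For derives with required companions (e.g. `Eq` needs `PartialEq`), the completion still assembles a combined label/lookup pair, but the check for already-present derives now goes through `ast::Path` name refs instead of plain strings. A rough sketch of the label/lookup assembly under that scheme, using an invented `DERIVE_DEPENDENCIES` table in place of `DEFAULT_DERIVE_COMPLETIONS` and plain strings in place of paths:

```rust
use itertools::Itertools;

// (derive, companion derives it should be inserted together with)
const DERIVE_DEPENDENCIES: &[(&str, &[&str])] =
    &[("Eq", &["PartialEq"]), ("Ord", &["PartialOrd", "Eq", "PartialEq"])];

fn completion_text(candidate: &str, existing: &[&str]) -> (String, String) {
    let deps = DERIVE_DEPENDENCIES
        .iter()
        .find(|(label, _)| *label == candidate)
        .map(|(_, deps)| *deps)
        .unwrap_or(&[]);

    let mut components = vec![candidate];
    // Only pull in companions that are not already annotated.
    components.extend(deps.iter().copied().filter(|dep| !existing.contains(dep)));

    let lookup = components.join(", ");
    // The visible label reverses the order so the companions come first.
    let label = components.iter().rev().join(", ");
    (label, lookup)
}

fn main() {
    // `PartialEq` is already derived, so completing `Eq` needs nothing extra.
    assert_eq!(completion_text("Eq", &["PartialEq"]), ("Eq".to_owned(), "Eq".to_owned()));
    // Completing `Ord` pulls in the still-missing `PartialOrd` and `Eq`.
    assert_eq!(
        completion_text("Ord", &["PartialEq"]),
        ("Eq, PartialOrd, Ord".to_owned(), "Ord, PartialOrd, Eq".to_owned())
    );
}
```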
56 changes: 45 additions & 11 deletions crates/ide_completion/src/completions/attribute/lint.rs
@@ -1,6 +1,6 @@
//! Completion for lints
use ide_db::helpers::generated_lints::Lint;
use syntax::ast;
use syntax::{ast, T};

use crate::{
context::CompletionContext,
@@ -14,17 +14,51 @@ pub(super) fn complete_lint(
derive_input: ast::TokenTree,
lints_completions: &[Lint],
) {
if let Some(existing_lints) = super::parse_comma_sep_input(derive_input) {
for lint_completion in
lints_completions.iter().filter(|completion| !existing_lints.contains(completion.label))
{
let mut item = CompletionItem::new(
CompletionKind::Attribute,
ctx.source_range(),
lint_completion.label,
);
if let Some(existing_lints) = super::parse_comma_sep_paths(derive_input) {
for &Lint { label, description } in lints_completions {
let (qual, name) = {
// FIXME: change `Lint`'s label to not store a path in it but split the prefix off instead?
let mut parts = label.split("::");
let ns_or_label = match parts.next() {
Some(it) => it,
None => continue,
};
let label = parts.next();
match label {
Some(label) => (Some(ns_or_label), label),
None => (None, ns_or_label),
}
};
let lint_already_annotated = existing_lints
.iter()
.filter_map(|path| {
let q = path.qualifier();
if q.as_ref().and_then(|it| it.qualifier()).is_some() {
return None;
}
Some((q.and_then(|it| it.as_single_name_ref()), path.segment()?.name_ref()?))
})
.any(|(q, name_ref)| {
let qualifier_matches = match (q, qual) {
(None, None) => true,
(None, Some(_)) => false,
(Some(_), None) => false,
(Some(q), Some(ns)) => q.text() == ns,
};
qualifier_matches && name_ref.text() == name
});
if lint_already_annotated {
continue;
}
let insert = match qual {
Some(qual) if !ctx.previous_token_is(T![:]) => format!("{}::{}", qual, name),
_ => name.to_owned(),
};
let mut item =
CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
item.kind(CompletionItemKind::Attribute)
.documentation(hir::Documentation::new(lint_completion.description.to_owned()));
.insert_text(insert)
.documentation(hir::Documentation::new(description.to_owned()));
item.add_to(acc)
}
}
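This hunk carries the actual fix: each lint label is split into an optional qualifier and a name, lints that are already annotated are skipped, and the qualifier is only included in the inserted text when the token before the cursor is not `:`. A simplified sketch of that decision, with `prev_token_is_colon` standing in for `ctx.previous_token_is(T![:])`:

```rust
fn insert_text(lint_label: &str, prev_token_is_colon: bool) -> String {
    // Split "clippy::too_many_lines" into ("clippy", "too_many_lines");
    // unqualified lints such as "unused" have no prefix.
    let (qual, name) = match lint_label.split_once("::") {
        Some((qual, name)) => (Some(qual), name),
        None => (None, lint_label),
    };
    match qual {
        // Completing `#[allow(clip|)]`: insert the full path.
        Some(qual) if !prev_token_is_colon => format!("{}::{}", qual, name),
        // Completing `#[allow(clippy::|)]`: the prefix is already typed out.
        _ => name.to_owned(),
    }
}

fn main() {
    assert_eq!(insert_text("clippy::too_many_lines", false), "clippy::too_many_lines");
    assert_eq!(insert_text("clippy::too_many_lines", true), "too_many_lines");
    assert_eq!(insert_text("unused", false), "unused");
}
```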
39 changes: 22 additions & 17 deletions crates/ide_completion/src/completions/attribute/repr.rs
@@ -8,29 +8,34 @@ use crate::{
Completions,
};

pub(super) fn complete_repr(
acc: &mut Completions,
ctx: &CompletionContext,
derive_input: ast::TokenTree,
) {
if let Some(existing_reprs) = super::parse_comma_sep_input(derive_input) {
for repr_completion in REPR_COMPLETIONS {
if existing_reprs
pub(super) fn complete_repr(acc: &mut Completions, ctx: &CompletionContext, input: ast::TokenTree) {
if let Some(existing_reprs) = super::parse_comma_sep_expr(input) {
for &ReprCompletion { label, snippet, lookup, collides } in REPR_COMPLETIONS {
let repr_already_annotated = existing_reprs
.iter()
.any(|it| repr_completion.label == it || repr_completion.collides.contains(&&**it))
{
.filter_map(|expr| match expr {
ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
ast::Expr::CallExpr(call) => match call.expr()? {
ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
_ => return None,
},
_ => None,
})
.any(|it| {
let text = it.text();
lookup.unwrap_or(label) == text || collides.contains(&text.as_str())
});
if repr_already_annotated {
continue;
}
let mut item = CompletionItem::new(
CompletionKind::Attribute,
ctx.source_range(),
repr_completion.label,
);

let mut item =
CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
item.kind(CompletionItemKind::Attribute);
if let Some(lookup) = repr_completion.lookup {
if let Some(lookup) = lookup {
item.lookup_by(lookup);
}
if let Some((snippet, cap)) = repr_completion.snippet.zip(ctx.config.snippet_cap) {
if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
item.insert_snippet(cap, snippet);
}
item.add_to(acc);
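`#[repr(..)]` arguments are now parsed as expressions so that both plain forms like `C` and call forms like `align(4)` reduce to a single name that can be compared against a completion's label or lookup text. An illustrative sketch with an invented `ReprArg` enum standing in for the `ast::Expr` cases handled above:

```rust
// Only the leading name of each existing `#[repr(..)]` argument matters for the
// "already annotated" check, so call arguments are dropped in this sketch.
enum ReprArg {
    // e.g. `C`, `transparent`, `u8`
    Path(String),
    // e.g. `align(4)`, `packed(2)`, reduced to the callee name
    Call(String),
}

fn name_of(arg: &ReprArg) -> &str {
    match arg {
        ReprArg::Path(name) | ReprArg::Call(name) => name,
    }
}

fn main() {
    let existing = [ReprArg::Path("C".to_owned()), ReprArg::Call("align".to_owned())];
    // A completion whose label or lookup matches an existing argument is skipped.
    let already_annotated = |lookup: &str| existing.iter().any(|arg| name_of(arg) == lookup);
    assert!(already_annotated("C"));
    assert!(already_annotated("align"));
    assert!(!already_annotated("transparent"));
}
```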