Use Jupyter mode for lexing
dhruvmanila committed Jul 26, 2023
1 parent e8eddd2 commit 69ee5e4
Showing 28 changed files with 261 additions and 72 deletions.
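
Every hunk below follows the same pattern: an `is_jupyter_notebook` flag (or `SourceKind::is_jupyter`) is threaded down to each helper that re-lexes source text, and the lexer mode is chosen from it. A minimal sketch of that selection, assuming the `Mode` enum from `ruff_python_parser` used throughout this commit — the `lex_mode` helper itself is hypothetical, since the commit inlines the `if` at each call site:

    use ruff_python_parser::Mode;

    /// Hypothetical helper: pick the lexer mode for a source file.
    /// Jupyter notebooks need `Mode::Jupyter` so that IPython magics
    /// (e.g. `%timeit x`) are tokenized instead of mis-lexed.
    fn lex_mode(is_jupyter_notebook: bool) -> Mode {
        if is_jupyter_notebook {
            Mode::Jupyter
        } else {
            Mode::Module
        }
    }
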
12 changes: 9 additions & 3 deletions crates/ruff/src/autofix/edits.rs
@@ -81,9 +81,15 @@ pub(crate) fn remove_argument(
     args: &[Expr],
     keywords: &[Keyword],
     remove_parentheses: bool,
+    is_jupyter_notebook: bool,
 ) -> Result<Edit> {
     // TODO(sbrugman): Preserve trailing comments.
     let contents = locator.after(call_at);
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
 
     let mut fix_start = None;
     let mut fix_end = None;
@@ -96,7 +102,7 @@ pub(crate) fn remove_argument(
     if n_arguments == 1 {
         // Case 1: there is only one argument.
         let mut count = 0u32;
-        for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
+        for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
             if tok.is_lpar() {
                 if count == 0 {
                     fix_start = Some(if remove_parentheses {
@@ -128,7 +134,7 @@ pub(crate) fn remove_argument(
     {
         // Case 2: argument or keyword is _not_ the last node.
         let mut seen_comma = false;
-        for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
+        for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
             if seen_comma {
                 if tok.is_non_logical_newline() {
                     // Also delete any non-logical newlines after the comma.
@@ -151,7 +157,7 @@ pub(crate) fn remove_argument(
     } else {
         // Case 3: argument or keyword is the last node, so we have to find the last
         // comma in the stmt.
-        for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
+        for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
             if range.start() == expr_range.start() {
                 fix_end = Some(expr_range.end());
                 break;

5 changes: 4 additions & 1 deletion crates/ruff/src/checkers/ast/mod.rs
@@ -52,7 +52,7 @@ use ruff_python_semantic::{
     ModuleKind, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, StarImport, SubmoduleImport,
 };
 use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
-use ruff_python_stdlib::path::is_python_stub_file;
+use ruff_python_stdlib::path::{is_jupyter_notebook, is_python_stub_file};
 use ruff_source_file::Locator;
 
 use crate::checkers::ast::deferred::Deferred;
@@ -76,6 +76,8 @@ pub(crate) struct Checker<'a> {
     module_path: Option<&'a [String]>,
     /// Whether the current file is a stub (`.pyi`) file.
     is_stub: bool,
+    /// Whether the current file is a Jupyter notebook (`.ipynb`) file.
+    pub(crate) is_jupyter_notebook: bool,
     /// The [`flags::Noqa`] for the current analysis (i.e., whether to respect suppression
     /// comments).
     noqa: flags::Noqa,
@@ -126,6 +128,7 @@ impl<'a> Checker<'a> {
             package,
             module_path: module.path(),
             is_stub: is_python_stub_file(path),
+            is_jupyter_notebook: is_jupyter_notebook(path),
             locator,
             stylist,
             indexer,

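The flag itself originates from the file path. A sketch of the check that `ruff_python_stdlib::path::is_jupyter_notebook` presumably performs — the body below is an assumption for illustration, not the crate's verbatim source:

    use std::path::Path;

    /// Assumed implementation: treat a file as a Jupyter notebook
    /// if and only if it has the `.ipynb` extension.
    fn is_jupyter_notebook(path: &Path) -> bool {
        path.extension().map_or(false, |ext| ext == "ipynb")
    }
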
8 changes: 7 additions & 1 deletion crates/ruff/src/checkers/imports.rs
@@ -104,7 +104,13 @@ pub(crate) fn check_imports(
     for block in &blocks {
         if !block.imports.is_empty() {
             if let Some(diagnostic) = isort::rules::organize_imports(
-                block, locator, stylist, indexer, settings, package,
+                block,
+                locator,
+                stylist,
+                indexer,
+                settings,
+                package,
+                source_kind.map_or(false, SourceKind::is_jupyter),
             ) {
                 diagnostics.push(diagnostic);
             }

12 changes: 9 additions & 3 deletions crates/ruff/src/importer/insertion.rs
@@ -137,16 +137,22 @@ impl<'a> Insertion<'a> {
         mut location: TextSize,
         locator: &Locator<'a>,
         stylist: &Stylist,
+        is_jupyter_notebook: bool,
     ) -> Insertion<'a> {
         enum Awaiting {
             Colon(u32),
             Newline,
             Indent,
         }
 
+        let mode = if is_jupyter_notebook {
+            Mode::Jupyter
+        } else {
+            Mode::Module
+        };
+
         let mut state = Awaiting::Colon(0);
-        for (tok, range) in
-            lexer::lex_starts_at(locator.after(location), Mode::Module, location).flatten()
+        for (tok, range) in lexer::lex_starts_at(locator.after(location), mode, location).flatten()
         {
             match state {
                 // Iterate until we find the colon indicating the start of the block body.
@@ -427,7 +433,7 @@ x = 1
         let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
         let locator = Locator::new(contents);
         let stylist = Stylist::from_tokens(&tokens, &locator);
-        Insertion::start_of_block(offset, &locator, &stylist)
+        Insertion::start_of_block(offset, &locator, &stylist, false)
     }
 
     let contents = "if True: pass";

13 changes: 10 additions & 3 deletions crates/ruff/src/importer/mod.rs
@@ -121,6 +121,7 @@ impl<'a> Importer<'a> {
         import: &StmtImports,
         at: TextSize,
         semantic: &SemanticModel,
+        is_jupyter_notebook: bool,
     ) -> Result<TypingImportEdit> {
         // Generate the modified import statement.
         let content = autofix::codemods::retain_imports(
@@ -140,7 +141,7 @@ impl<'a> Importer<'a> {
         // Add the import to a `TYPE_CHECKING` block.
         let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) {
             // Add the import to the `TYPE_CHECKING` block.
-            self.add_to_type_checking_block(&content, block.start())
+            self.add_to_type_checking_block(&content, block.start(), is_jupyter_notebook)
         } else {
             // Add the import to a new `TYPE_CHECKING` block.
             self.add_type_checking_block(
@@ -353,8 +354,14 @@ impl<'a> Importer<'a> {
     }
 
     /// Add an import statement to an existing `TYPE_CHECKING` block.
-    fn add_to_type_checking_block(&self, content: &str, at: TextSize) -> Edit {
-        Insertion::start_of_block(at, self.locator, self.stylist).into_edit(content)
+    fn add_to_type_checking_block(
+        &self,
+        content: &str,
+        at: TextSize,
+        is_jupyter_notebook: bool,
+    ) -> Edit {
+        Insertion::start_of_block(at, self.locator, self.stylist, is_jupyter_notebook)
+            .into_edit(content)
     }
 
     /// Return the import statement that precedes the given position, if any.

8 changes: 7 additions & 1 deletion crates/ruff/src/rules/flake8_annotations/fixes.rs
@@ -10,14 +10,20 @@ pub(crate) fn add_return_annotation(
     locator: &Locator,
     stmt: &Stmt,
     annotation: &str,
+    is_jupyter_notebook: bool,
 ) -> Result<Edit> {
     let contents = &locator.contents()[stmt.range()];
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
 
     // Find the colon (following the `def` keyword).
     let mut seen_lpar = false;
     let mut seen_rpar = false;
     let mut count = 0u32;
-    for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
+    for (tok, range) in lexer::lex_starts_at(contents, mode, stmt.start()).flatten() {
         if seen_lpar && seen_rpar {
             if matches!(tok, Tok::Colon) {
                 return Ok(Edit::insertion(format!(" -> {annotation}"), range.start()));

18 changes: 14 additions & 4 deletions crates/ruff/src/rules/flake8_annotations/rules/definition.rs
@@ -706,8 +706,13 @@ pub(crate) fn definition(
                 );
                 if checker.patch(diagnostic.kind.rule()) {
                     diagnostic.try_set_fix(|| {
-                        fixes::add_return_annotation(checker.locator(), stmt, "None")
-                            .map(Fix::suggested)
+                        fixes::add_return_annotation(
+                            checker.locator(),
+                            stmt,
+                            "None",
+                            checker.is_jupyter_notebook,
+                        )
+                        .map(Fix::suggested)
                     });
                 }
                 diagnostics.push(diagnostic);
@@ -724,8 +729,13 @@ pub(crate) fn definition(
                 if checker.patch(diagnostic.kind.rule()) {
                     if let Some(return_type) = simple_magic_return_type(name) {
                         diagnostic.try_set_fix(|| {
-                            fixes::add_return_annotation(checker.locator(), stmt, return_type)
-                                .map(Fix::suggested)
+                            fixes::add_return_annotation(
+                                checker.locator(),
+                                stmt,
+                                return_type,
+                                checker.is_jupyter_notebook,
+                            )
+                            .map(Fix::suggested)
                         });
                     }
                 }

1 change: 1 addition & 0 deletions crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs
@@ -351,6 +351,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &D
                     args,
                     keywords,
                     false,
+                    checker.is_jupyter_notebook,
                 )
                 .map(Fix::suggested)
             });

24 changes: 15 additions & 9 deletions crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs
@@ -95,18 +95,19 @@ fn elts_to_csv(elts: &[Expr], generator: Generator) -> Option<String> {
 /// ```
 ///
 /// This method assumes that the first argument is a string.
-fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Locator) -> TextRange {
+fn get_parametrize_name_range(
+    decorator: &Decorator,
+    expr: &Expr,
+    locator: &Locator,
+    mode: Mode,
+) -> TextRange {
     let mut locations = Vec::new();
     let mut implicit_concat = None;
 
     // The parenthesis are not part of the AST, so we need to tokenize the
     // decorator to find them.
-    for (tok, range) in lexer::lex_starts_at(
-        locator.slice(decorator.range()),
-        Mode::Module,
-        decorator.start(),
-    )
-    .flatten()
+    for (tok, range) in
+        lexer::lex_starts_at(locator.slice(decorator.range()), mode, decorator.start()).flatten()
     {
         match tok {
             Tok::Lpar => locations.push(range.start()),
@@ -131,6 +132,11 @@ fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Loca
 /// PT006
 fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
     let names_type = checker.settings.flake8_pytest_style.parametrize_names_type;
+    let mode = if checker.is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
 
     match expr {
         Expr::Constant(ast::ExprConstant {
@@ -142,7 +148,7 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
             match names_type {
                 types::ParametrizeNameType::Tuple => {
                     let name_range =
-                        get_parametrize_name_range(decorator, expr, checker.locator());
+                        get_parametrize_name_range(decorator, expr, checker.locator(), mode);
                     let mut diagnostic = Diagnostic::new(
                         PytestParametrizeNamesWrongType {
                             expected: names_type,
@@ -173,7 +179,7 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
                 }
                 types::ParametrizeNameType::List => {
                     let name_range =
-                        get_parametrize_name_range(decorator, expr, checker.locator());
+                        get_parametrize_name_range(decorator, expr, checker.locator(), mode);
                     let mut diagnostic = Diagnostic::new(
                         PytestParametrizeNamesWrongType {
                             expected: names_type,

@@ -66,7 +66,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
         return;
     }
 
-    let range = match_parens(func.end(), checker.locator())
+    let range = match_parens(func.end(), checker.locator(), checker.is_jupyter_notebook)
         .expect("Expected call to include parentheses");
     let mut diagnostic = Diagnostic::new(UnnecessaryParenOnRaiseException, range);
     if checker.patch(diagnostic.kind.rule()) {
@@ -78,14 +78,24 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
 }
 
 /// Return the range of the first parenthesis pair after a given [`TextSize`].
-fn match_parens(start: TextSize, locator: &Locator) -> Option<TextRange> {
+fn match_parens(
+    start: TextSize,
+    locator: &Locator,
+    is_jupyter_notebook: bool,
+) -> Option<TextRange> {
     let contents = &locator.contents()[usize::from(start)..];
 
     let mut fix_start = None;
     let mut fix_end = None;
     let mut count = 0u32;
 
-    for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, start).flatten() {
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+
+    for (tok, range) in lexer::lex_starts_at(contents, mode, start).flatten() {
         match tok {
             Tok::Lpar => {
                 if count == 0 {

1 change: 1 addition & 0 deletions crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs
@@ -374,6 +374,7 @@ pub(crate) fn nested_if_statements(checker: &mut Checker, stmt_if: &StmtIf, pare
     let colon = first_colon_range(
         TextRange::new(test.end(), first_stmt.start()),
         checker.locator().contents(),
+        checker.is_jupyter_notebook,
     );
 
     // Check if the parent is already emitting a larger diagnostic including this if statement

1 change: 1 addition & 0 deletions crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs
@@ -119,6 +119,7 @@ pub(crate) fn multiple_with_statements(
             body.first().expect("Expected body to be non-empty").start(),
         ),
         checker.locator().contents(),
+        checker.is_jupyter_notebook,
     );
 
     let mut diagnostic = Diagnostic::new(

@@ -447,6 +447,7 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
         },
         at,
         checker.semantic(),
+        checker.is_jupyter_notebook,
     )?;
 
     Ok(

3 changes: 2 additions & 1 deletion crates/ruff/src/rules/isort/annotate.rs
@@ -13,6 +13,7 @@ pub(crate) fn annotate_imports<'a>(
     comments: Vec<Comment<'a>>,
     locator: &Locator,
     split_on_trailing_comma: bool,
+    is_jupyter_notebook: bool,
 ) -> Vec<AnnotatedImport<'a>> {
     let mut comments_iter = comments.into_iter().peekable();
 
@@ -119,7 +120,7 @@ pub(crate) fn annotate_imports<'a>(
                 names: aliases,
                 level: level.map(|level| level.to_u32()),
                 trailing_comma: if split_on_trailing_comma {
-                    trailing_comma(import, locator)
+                    trailing_comma(import, locator, is_jupyter_notebook)
                 } else {
                     TrailingComma::default()
                 },

13 changes: 11 additions & 2 deletions crates/ruff/src/rules/isort/comments.rs
@@ -22,9 +22,18 @@ impl Comment<'_> {
 }
 
 /// Collect all comments in an import block.
-pub(crate) fn collect_comments<'a>(range: TextRange, locator: &'a Locator) -> Vec<Comment<'a>> {
+pub(crate) fn collect_comments<'a>(
+    range: TextRange,
+    locator: &'a Locator,
+    is_jupyter_notebook: bool,
+) -> Vec<Comment<'a>> {
     let contents = locator.slice(range);
-    lexer::lex_starts_at(contents, Mode::Module, range.start())
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+    lexer::lex_starts_at(contents, mode, range.start())
         .flatten()
         .filter_map(|(tok, range)| {
             if let Tok::Comment(value) = tok {

13 changes: 11 additions & 2 deletions crates/ruff/src/rules/isort/helpers.rs
@@ -8,11 +8,20 @@ use crate::rules::isort::types::TrailingComma;
 
 /// Return `true` if a `Stmt::ImportFrom` statement ends with a magic
 /// trailing comma.
-pub(super) fn trailing_comma(stmt: &Stmt, locator: &Locator) -> TrailingComma {
+pub(super) fn trailing_comma(
+    stmt: &Stmt,
+    locator: &Locator,
+    is_jupyter_notebook: bool,
+) -> TrailingComma {
     let contents = locator.slice(stmt.range());
     let mut count = 0u32;
     let mut trailing_comma = TrailingComma::Absent;
-    for (tok, _) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
+    let mode = if is_jupyter_notebook {
+        Mode::Jupyter
+    } else {
+        Mode::Module
+    };
+    for (tok, _) in lexer::lex_starts_at(contents, mode, stmt.start()).flatten() {
         if matches!(tok, Tok::Lpar) {
             count = count.saturating_add(1);
         }

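For context, the practical difference between the two modes shows up on notebook cells that begin with IPython line magics. A hedged sketch using the `lexer::lex_starts_at` signature visible in the hunks above; the exact token kinds produced in Jupyter mode are an assumption:

    use ruff_python_parser::{lexer, Mode};
    use ruff_text_size::TextSize;

    fn main() {
        let cell = "%matplotlib inline\nx = 1\n";
        // In Module mode, `%` lexes as the modulo operator, so the magic
        // line tokenizes into `%`, `matplotlib`, `inline` — nonsense for
        // any fix logic that walks the token stream.
        let as_module: Vec<_> =
            lexer::lex_starts_at(cell, Mode::Module, TextSize::default()).collect();
        // In Jupyter mode, the lexer recognizes the line magic and emits a
        // dedicated token covering the whole `%matplotlib inline` line.
        let as_jupyter: Vec<_> =
            lexer::lex_starts_at(cell, Mode::Jupyter, TextSize::default()).collect();
        println!("{as_module:?}");
        println!("{as_jupyter:?}");
    }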