linegen.py
"""
Generating lines of code.
"""
from functools import partial, wraps
import sys
from typing import Collection, Iterator, List, Optional, Set, Union, cast
from black.nodes import WHITESPACE, RARROW, STATEMENT, STANDALONE_COMMENT
from black.nodes import ASSIGNMENTS, OPENING_BRACKETS, CLOSING_BRACKETS
from black.nodes import Visitor, syms, is_arith_like, ensure_visible
from black.nodes import (
is_docstring,
is_empty_tuple,
is_one_tuple,
is_one_sequence_between,
)
from black.nodes import is_name_token, is_lpar_token, is_rpar_token
from black.nodes import is_walrus_assignment, is_yield, is_vararg, is_multiline_string
from black.nodes import is_stub_suite, is_stub_body, is_atom_with_invisible_parens
from black.nodes import wrap_in_parentheses
from black.brackets import max_delimiter_priority_in_atom
from black.brackets import DOT_PRIORITY, COMMA_PRIORITY
from black.lines import Line, line_to_string, is_line_short_enough
from black.lines import can_omit_invisible_parens, can_be_split, append_leaves
from black.comments import generate_comments, list_comments, FMT_OFF
from black.numerics import normalize_numeric_literal
from black.strings import get_string_prefix, fix_docstring
from black.strings import normalize_string_prefix, normalize_string_quotes
from black.trans import Transformer, CannotTransform, StringMerger, StringSplitter
from black.trans import StringParenWrapper, StringParenStripper, hug_power_op
from black.mode import Mode, Feature, Preview
from blib2to3.pytree import Node, Leaf
from blib2to3.pgen2 import token
# types
LeafID = int
LN = Union[Leaf, Node]
class CannotSplit(CannotTransform):
"""A readable split that fits the allotted line length is impossible."""
# This isn't a dataclass because @dataclass + Generic breaks mypyc.
# See also https://github.com/mypyc/mypyc/issues/827.
class LineGenerator(Visitor[Line]):
"""Generates reformatted Line objects. Empty lines are not emitted.
Note: destroys the tree it's visiting by mutating prefixes of its leaves
in ways that will no longer stringify to valid Python code on the tree.
"""
def __init__(self, mode: Mode) -> None:
self.mode = mode
self.current_line: Line
self.__post_init__()
def line(self, indent: int = 0) -> Iterator[Line]:
"""Generate a line.
If the line is empty, only emit if it makes sense.
If the line is too long, split it first and then generate.
If any lines were generated, set up a new current_line.
"""
if not self.current_line:
self.current_line.depth += indent
return # Line is empty, don't emit. Creating a new one is unnecessary.
complete_line = self.current_line
self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
yield complete_line
def visit_default(self, node: LN) -> Iterator[Line]:
"""Default `visit_*()` implementation. Recurses to children of `node`."""
if isinstance(node, Leaf):
any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
for comment in generate_comments(node, preview=self.mode.preview):
if any_open_brackets:
# any comment within brackets is subject to splitting
self.current_line.append(comment)
elif comment.type == token.COMMENT:
# regular trailing comment
self.current_line.append(comment)
yield from self.line()
else:
# regular standalone comment
yield from self.line()
self.current_line.append(comment)
yield from self.line()
normalize_prefix(node, inside_brackets=any_open_brackets)
if self.mode.string_normalization and node.type == token.STRING:
node.value = normalize_string_prefix(node.value)
node.value = normalize_string_quotes(node.value)
if node.type == token.NUMBER:
normalize_numeric_literal(node)
if node.type not in WHITESPACE:
self.current_line.append(node)
yield from super().visit_default(node)
def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
"""Increase indentation level, maybe yield a line."""
# In blib2to3 INDENT never holds comments.
yield from self.line(+1)
yield from self.visit_default(node)
def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
"""Decrease indentation level, maybe yield a line."""
# The current line might still wait for trailing comments. At DEDENT time
# there won't be any (they would be prefixes on the preceding NEWLINE).
# Emit the line then.
yield from self.line()
# While DEDENT has no value, its prefix may contain standalone comments
# that belong to the current indentation level. Get 'em.
yield from self.visit_default(node)
# Finally, emit the dedent.
yield from self.line(-1)
def visit_stmt(
self, node: Node, keywords: Set[str], parens: Set[str]
) -> Iterator[Line]:
"""Visit a statement.
This implementation is shared for `if`, `while`, `for`, `try`, `except`,
`def`, `with`, `class`, `assert`, and assignments.
The relevant Python language `keywords` for a given statement will be
NAME leaves within it. This method puts those on a separate line.
`parens` holds a set of string leaf values immediately after which
invisible parens should be put.
"""
normalize_invisible_parens(node, parens_after=parens, preview=self.mode.preview)
for child in node.children:
if is_name_token(child) and child.value in keywords:
yield from self.line()
yield from self.visit(child)
def visit_funcdef(self, node: Node) -> Iterator[Line]:
"""Visit function definition."""
if Preview.annotation_parens not in self.mode:
yield from self.visit_stmt(node, keywords={"def"}, parens=set())
else:
yield from self.line()
# Remove redundant brackets around return type annotation.
is_return_annotation = False
for child in node.children:
if child.type == token.RARROW:
is_return_annotation = True
elif is_return_annotation:
if child.type == syms.atom and child.children[0].type == token.LPAR:
if maybe_make_parens_invisible_in_atom(
child,
parent=node,
remove_brackets_around_comma=False,
):
wrap_in_parentheses(node, child, visible=False)
else:
wrap_in_parentheses(node, child, visible=False)
is_return_annotation = False
for child in node.children:
yield from self.visit(child)
def visit_match_case(self, node: Node) -> Iterator[Line]:
"""Visit either a match or case statement."""
normalize_invisible_parens(node, parens_after=set(), preview=self.mode.preview)
yield from self.line()
for child in node.children:
yield from self.visit(child)
def visit_suite(self, node: Node) -> Iterator[Line]:
"""Visit a suite."""
if self.mode.is_pyi and is_stub_suite(node):
yield from self.visit(node.children[2])
else:
yield from self.visit_default(node)
def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
"""Visit a statement without nested statements."""
prev_type: Optional[int] = None
for child in node.children:
if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
wrap_in_parentheses(node, child, visible=False)
prev_type = child.type
is_suite_like = node.parent and node.parent.type in STATEMENT
if is_suite_like:
if self.mode.is_pyi and is_stub_body(node):
yield from self.visit_default(node)
else:
yield from self.line(+1)
yield from self.visit_default(node)
yield from self.line(-1)
else:
if (
not self.mode.is_pyi
or not node.parent
or not is_stub_suite(node.parent)
):
yield from self.line()
yield from self.visit_default(node)
def visit_async_stmt(self, node: Node) -> Iterator[Line]:
"""Visit `async def`, `async for`, `async with`."""
yield from self.line()
children = iter(node.children)
for child in children:
yield from self.visit(child)
if child.type == token.ASYNC:
break
internal_stmt = next(children)
for child in internal_stmt.children:
yield from self.visit(child)
def visit_decorators(self, node: Node) -> Iterator[Line]:
"""Visit decorators."""
for child in node.children:
yield from self.line()
yield from self.visit(child)
def visit_power(self, node: Node) -> Iterator[Line]:
for idx, leaf in enumerate(node.children[:-1]):
next_leaf = node.children[idx + 1]
if not isinstance(leaf, Leaf):
continue
value = leaf.value.lower()
if (
leaf.type == token.NUMBER
and next_leaf.type == syms.trailer
# Ensure that we are in an attribute trailer
and next_leaf.children[0].type == token.DOT
# It shouldn't wrap hexadecimal, binary and octal literals
and not value.startswith(("0x", "0b", "0o"))
# It shouldn't wrap complex literals
and "j" not in value
):
wrap_in_parentheses(node, leaf)
if Preview.remove_redundant_parens in self.mode:
remove_await_parens(node)
yield from self.visit_default(node)
def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
"""Remove a semicolon and put the other statement on a separate line."""
yield from self.line()
def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
"""End of file. Process outstanding comments and end with a newline."""
yield from self.visit_default(leaf)
yield from self.line()
def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
if not self.current_line.bracket_tracker.any_open_brackets():
yield from self.line()
yield from self.visit_default(leaf)
def visit_factor(self, node: Node) -> Iterator[Line]:
"""Force parentheses between a unary op and a binary power:
-2 ** 8 -> -(2 ** 8)
"""
_operator, operand = node.children
if (
operand.type == syms.power
and len(operand.children) == 3
and operand.children[1].type == token.DOUBLESTAR
):
lpar = Leaf(token.LPAR, "(")
rpar = Leaf(token.RPAR, ")")
index = operand.remove() or 0
node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
yield from self.visit_default(node)
def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
if is_docstring(leaf) and "\\\n" not in leaf.value:
# We're ignoring docstrings with backslash newline escapes because changing
# indentation of those changes the AST representation of the code.
docstring = normalize_string_prefix(leaf.value)
prefix = get_string_prefix(docstring)
docstring = docstring[len(prefix) :] # Remove the prefix
quote_char = docstring[0]
# A natural way to remove the outer quotes is to do:
# docstring = docstring.strip(quote_char)
# but that breaks on """""x""" (which is '""x').
# So we actually need to remove the first character and the next two
# characters but only if they are the same as the first.
quote_len = 1 if docstring[1] != quote_char else 3
docstring = docstring[quote_len:-quote_len]
docstring_started_empty = not docstring
indent = " " * 4 * self.current_line.depth
if is_multiline_string(leaf):
docstring = fix_docstring(docstring, indent)
else:
docstring = docstring.strip()
if docstring:
# Add some padding if the docstring starts / ends with a quote mark.
if docstring[0] == quote_char:
docstring = " " + docstring
if docstring[-1] == quote_char:
docstring += " "
if docstring[-1] == "\\":
backslash_count = len(docstring) - len(docstring.rstrip("\\"))
if backslash_count % 2:
# Odd number of trailing backslashes, add some padding to
# avoid escaping the closing string quote.
docstring += " "
elif not docstring_started_empty:
docstring = " "
# We could enforce triple quotes at this point.
quote = quote_char * quote_len
if Preview.long_docstring_quotes_on_newline in self.mode:
# We need to find the length of the last line of the docstring
# to find if we can add the closing quotes to the line without
# exceeding the maximum line length.
# If the docstring is one line, then we need to add the length
# of the indent, prefix, and starting quotes. Ending quotes are
# handled later
lines = docstring.splitlines()
last_line_length = len(lines[-1]) if docstring else 0
if len(lines) == 1:
last_line_length += len(indent) + len(prefix) + quote_len
# If adding closing quotes would cause the last line to exceed
# the maximum line length then put a line break before the
# closing quotes
if last_line_length + quote_len > self.mode.line_length:
leaf.value = prefix + quote + docstring + "\n" + indent + quote
else:
leaf.value = prefix + quote + docstring + quote
else:
leaf.value = prefix + quote + docstring + quote
yield from self.visit_default(leaf)
def __post_init__(self) -> None:
"""You are in a twisty little maze of passages."""
self.current_line = Line(mode=self.mode)
v = self.visit_stmt
Ø: Set[str] = set()
self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
self.visit_if_stmt = partial(
v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
)
self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
self.visit_try_stmt = partial(
v, keywords={"try", "except", "else", "finally"}, parens=Ø
)
if self.mode.preview:
self.visit_except_clause = partial(
v, keywords={"except"}, parens={"except"}
)
self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
else:
self.visit_except_clause = partial(v, keywords={"except"}, parens=Ø)
self.visit_with_stmt = partial(v, keywords={"with"}, parens=Ø)
self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)
self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
self.visit_async_funcdef = self.visit_async_stmt
self.visit_decorated = self.visit_decorators
# PEP 634
self.visit_match_stmt = self.visit_match_case
self.visit_case_block = self.visit_match_case
def transform_line(
line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
"""Transform a `line`, potentially splitting it into many lines.
They should fit in the allotted `line_length` but might not be able to.
`features` are syntactical features that may be used in the output.
"""
if line.is_comment:
yield line
return
line_str = line_to_string(line)
ll = mode.line_length
sn = mode.string_normalization
string_merge = StringMerger(ll, sn)
string_paren_strip = StringParenStripper(ll, sn)
string_split = StringSplitter(ll, sn)
string_paren_wrap = StringParenWrapper(ll, sn)
transformers: List[Transformer]
if (
not line.contains_uncollapsable_type_comments()
and not line.should_split_rhs
and not line.magic_trailing_comma
and (
is_line_short_enough(line, line_length=mode.line_length, line_str=line_str)
or line.contains_unsplittable_type_ignore()
)
and not (line.inside_brackets and line.contains_standalone_comments())
):
# Only apply basic string preprocessing, since lines shouldn't be split here.
if Preview.string_processing in mode:
transformers = [string_merge, string_paren_strip]
else:
transformers = []
elif line.is_def:
transformers = [left_hand_split]
else:
def _rhs(
self: object, line: Line, features: Collection[Feature]
) -> Iterator[Line]:
"""Wraps calls to `right_hand_split`.
The calls increasingly `omit` right-hand trailers (bracket pairs with
content), meaning the trailers get glued together to split on another
bracket pair instead.
"""
for omit in generate_trailers_to_omit(line, mode.line_length):
lines = list(
right_hand_split(line, mode.line_length, features, omit=omit)
)
# Note: this check is only able to figure out if the first line of the
# *current* transformation fits in the line length. This is true only
# for simple cases. All others require running more transforms via
# `transform_line()`. This check doesn't know if those would succeed.
if is_line_short_enough(lines[0], line_length=mode.line_length):
yield from lines
return
# All splits failed, best effort split with no omits.
# This mostly happens to multiline strings that are by definition
# reported as not fitting a single line, as well as lines that contain
# trailing commas (those have to be exploded).
yield from right_hand_split(
line, line_length=mode.line_length, features=features
)
# HACK: nested functions (like _rhs) compiled by mypyc don't retain their
# __name__ attribute which is needed in `run_transformer` further down.
# Unfortunately a nested class breaks mypyc too. So a class must be created
# via type ... https://github.com/mypyc/mypyc/issues/884
rhs = type("rhs", (), {"__call__": _rhs})()
if Preview.string_processing in mode:
if line.inside_brackets:
transformers = [
string_merge,
string_paren_strip,
string_split,
delimiter_split,
standalone_comment_split,
string_paren_wrap,
rhs,
]
else:
transformers = [
string_merge,
string_paren_strip,
string_split,
string_paren_wrap,
rhs,
]
else:
if line.inside_brackets:
transformers = [delimiter_split, standalone_comment_split, rhs]
else:
transformers = [rhs]
# It's always safe to attempt hugging of power operations and pretty much every line
# could match.
transformers.append(hug_power_op)
for transform in transformers:
# We are accumulating lines in `result` because we might want to abort
# mission and return the original line in the end, or attempt a different
# split altogether.
try:
result = run_transformer(line, transform, mode, features, line_str=line_str)
except CannotTransform:
continue
else:
yield from result
break
else:
yield line
def left_hand_split(line: Line, _features: Collection[Feature] = ()) -> Iterator[Line]:
"""Split line into many lines, starting with the first matching bracket pair.
Note: this usually looks weird, only use this for function definitions.
Prefer RHS otherwise. This is why this function is not symmetrical with
:func:`right_hand_split` which also handles optional parentheses.
"""
tail_leaves: List[Leaf] = []
body_leaves: List[Leaf] = []
head_leaves: List[Leaf] = []
current_leaves = head_leaves
matching_bracket: Optional[Leaf] = None
for leaf in line.leaves:
if (
current_leaves is body_leaves
and leaf.type in CLOSING_BRACKETS
and leaf.opening_bracket is matching_bracket
and isinstance(matching_bracket, Leaf)
):
ensure_visible(leaf)
ensure_visible(matching_bracket)
current_leaves = tail_leaves if body_leaves else head_leaves
current_leaves.append(leaf)
if current_leaves is head_leaves:
if leaf.type in OPENING_BRACKETS:
matching_bracket = leaf
current_leaves = body_leaves
if not matching_bracket:
raise CannotSplit("No brackets found")
head = bracket_split_build_line(head_leaves, line, matching_bracket)
body = bracket_split_build_line(body_leaves, line, matching_bracket, is_body=True)
tail = bracket_split_build_line(tail_leaves, line, matching_bracket)
bracket_split_succeeded_or_raise(head, body, tail)
for result in (head, body, tail):
if result:
yield result
def right_hand_split(
line: Line,
line_length: int,
features: Collection[Feature] = (),
omit: Collection[LeafID] = (),
) -> Iterator[Line]:
"""Split line into many lines, starting with the last matching bracket pair.
If the split was by optional parentheses, attempt splitting without them, too.
`omit` is a collection of closing bracket IDs that shouldn't be considered for
this split.
Note: running this function modifies `bracket_depth` on the leaves of `line`.
"""
tail_leaves: List[Leaf] = []
body_leaves: List[Leaf] = []
head_leaves: List[Leaf] = []
current_leaves = tail_leaves
opening_bracket: Optional[Leaf] = None
closing_bracket: Optional[Leaf] = None
for leaf in reversed(line.leaves):
if current_leaves is body_leaves:
if leaf is opening_bracket:
current_leaves = head_leaves if body_leaves else tail_leaves
current_leaves.append(leaf)
if current_leaves is tail_leaves:
if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
opening_bracket = leaf.opening_bracket
closing_bracket = leaf
current_leaves = body_leaves
if not (opening_bracket and closing_bracket and head_leaves):
# If there is no opening or closing_bracket that means the split failed and
# all content is in the tail. Otherwise, if `head_leaves` are empty, it means
# the matching `opening_bracket` wasn't available on `line` anymore.
raise CannotSplit("No brackets found")
tail_leaves.reverse()
body_leaves.reverse()
head_leaves.reverse()
head = bracket_split_build_line(head_leaves, line, opening_bracket)
body = bracket_split_build_line(body_leaves, line, opening_bracket, is_body=True)
tail = bracket_split_build_line(tail_leaves, line, opening_bracket)
bracket_split_succeeded_or_raise(head, body, tail)
if (
Feature.FORCE_OPTIONAL_PARENTHESES not in features
# the opening bracket is an optional paren
and opening_bracket.type == token.LPAR
and not opening_bracket.value
# the closing bracket is an optional paren
and closing_bracket.type == token.RPAR
and not closing_bracket.value
# it's not an import (optional parens are the only thing we can split on
# in this case; attempting a split without them is a waste of time)
and not line.is_import
# there are no standalone comments in the body
and not body.contains_standalone_comments(0)
# and we can actually remove the parens
and can_omit_invisible_parens(body, line_length)
):
omit = {id(closing_bracket), *omit}
try:
yield from right_hand_split(line, line_length, features=features, omit=omit)
return
except CannotSplit as e:
if not (
can_be_split(body)
or is_line_short_enough(body, line_length=line_length)
):
raise CannotSplit(
"Splitting failed, body is still too long and can't be split."
) from e
elif head.contains_multiline_strings() or tail.contains_multiline_strings():
raise CannotSplit(
"The current optional pair of parentheses is bound to fail to"
" satisfy the splitting algorithm because the head or the tail"
" contains multiline strings which by definition never fit one"
" line."
) from e
ensure_visible(opening_bracket)
ensure_visible(closing_bracket)
for result in (head, body, tail):
if result:
yield result
def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
"""Raise :exc:`CannotSplit` if the last left- or right-hand split failed.
Do nothing otherwise.
A left- or right-hand split is based on a pair of brackets. Content before
(and including) the opening bracket is left on one line, content inside the
brackets is put on a separate line, and finally content starting with and
following the closing bracket is put on a separate line.
Those are called `head`, `body`, and `tail`, respectively. If the split
produced the same line (all content in `head`) or ended up with an empty `body`
and the `tail` is just the closing bracket, then it's considered failed.
"""
tail_len = len(str(tail).strip())
if not body:
if tail_len == 0:
raise CannotSplit("Splitting brackets produced the same line")
elif tail_len < 3:
raise CannotSplit(
f"Splitting brackets on an empty body to save {tail_len} characters is"
" not worth it"
)
def bracket_split_build_line(
leaves: List[Leaf], original: Line, opening_bracket: Leaf, *, is_body: bool = False
) -> Line:
"""Return a new line with given `leaves` and respective comments from `original`.
If `is_body` is True, the result line is one-indented inside brackets and as such
has its first leaf's prefix normalized and a trailing comma added when expected.
"""
result = Line(mode=original.mode, depth=original.depth)
if is_body:
result.inside_brackets = True
result.depth += 1
if leaves:
# Since body is a new indent level, remove spurious leading whitespace.
normalize_prefix(leaves[0], inside_brackets=True)
# Ensure a trailing comma for imports and standalone function arguments, but
# be careful not to add one after any comments or within type annotations.
no_commas = (
original.is_def
and opening_bracket.value == "("
and not any(leaf.type == token.COMMA for leaf in leaves)
# In particular, don't add one within a parenthesized return annotation.
# Unfortunately the indicator we're in a return annotation (RARROW) may
# be defined directly in the parent node, the parent of the parent ...
# and so on depending on how complex the return annotation is.
# This isn't perfect and there are some false negatives, but they are in
# contexts where a comma is actually fine.
and not any(
node.prev_sibling.type == RARROW
for node in (
leaves[0].parent,
getattr(leaves[0].parent, "parent", None),
)
if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf)
)
)
if original.is_import or no_commas:
for i in range(len(leaves) - 1, -1, -1):
if leaves[i].type == STANDALONE_COMMENT:
continue
if leaves[i].type != token.COMMA:
new_comma = Leaf(token.COMMA, ",")
leaves.insert(i + 1, new_comma)
break
# Populate the line
for leaf in leaves:
result.append(leaf, preformatted=True)
for comment_after in original.comments_after(leaf):
result.append(comment_after, preformatted=True)
if is_body and should_split_line(result, opening_bracket):
result.should_split_rhs = True
return result
def dont_increase_indentation(split_func: Transformer) -> Transformer:
"""Normalize prefix of the first leaf in every line returned by `split_func`.
This is a decorator over relevant split functions.
"""
@wraps(split_func)
def split_wrapper(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
for split_line in split_func(line, features):
normalize_prefix(split_line.leaves[0], inside_brackets=True)
yield split_line
return split_wrapper
@dont_increase_indentation
def delimiter_split(line: Line, features: Collection[Feature] = ()) -> Iterator[Line]:
"""Split according to delimiters of the highest priority.
If the appropriate Features are given, the split will add trailing commas
also in function signatures and calls that contain `*` and `**`.
"""
try:
last_leaf = line.leaves[-1]
except IndexError:
raise CannotSplit("Line empty") from None
bt = line.bracket_tracker
try:
delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
except ValueError:
raise CannotSplit("No delimiters found") from None
if delimiter_priority == DOT_PRIORITY:
if bt.delimiter_count_with_priority(delimiter_priority) == 1:
raise CannotSplit("Splitting a single attribute from its owner looks wrong")
current_line = Line(
mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
)
lowest_depth = sys.maxsize
trailing_comma_safe = True
def append_to_line(leaf: Leaf) -> Iterator[Line]:
"""Append `leaf` to current line or to new line if appending impossible."""
nonlocal current_line
try:
current_line.append_safe(leaf, preformatted=True)
except ValueError:
yield current_line
current_line = Line(
mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
)
current_line.append(leaf)
for leaf in line.leaves:
yield from append_to_line(leaf)
for comment_after in line.comments_after(leaf):
yield from append_to_line(comment_after)
lowest_depth = min(lowest_depth, leaf.bracket_depth)
if leaf.bracket_depth == lowest_depth:
if is_vararg(leaf, within={syms.typedargslist}):
trailing_comma_safe = (
trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features
)
elif is_vararg(leaf, within={syms.arglist, syms.argument}):
trailing_comma_safe = (
trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features
)
leaf_priority = bt.delimiters.get(id(leaf))
if leaf_priority == delimiter_priority:
yield current_line
current_line = Line(
mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
)
if current_line:
if (
trailing_comma_safe
and delimiter_priority == COMMA_PRIORITY
and current_line.leaves[-1].type != token.COMMA
and current_line.leaves[-1].type != STANDALONE_COMMENT
):
new_comma = Leaf(token.COMMA, ",")
current_line.append(new_comma)
yield current_line
@dont_increase_indentation
def standalone_comment_split(
line: Line, features: Collection[Feature] = ()
) -> Iterator[Line]:
"""Split standalone comments from the rest of the line."""
if not line.contains_standalone_comments(0):
raise CannotSplit("Line does not have any standalone comments")
current_line = Line(
mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
)
def append_to_line(leaf: Leaf) -> Iterator[Line]:
"""Append `leaf` to current line or to new line if appending impossible."""
nonlocal current_line
try:
current_line.append_safe(leaf, preformatted=True)
except ValueError:
yield current_line
current_line = Line(
line.mode, depth=line.depth, inside_brackets=line.inside_brackets
)
current_line.append(leaf)
for leaf in line.leaves:
yield from append_to_line(leaf)
for comment_after in line.comments_after(leaf):
yield from append_to_line(comment_after)
if current_line:
yield current_line
def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None:
"""Leave existing extra newlines if not `inside_brackets`. Remove everything
else.
Note: don't use backslashes for formatting or you'll lose your voting rights.
"""
if not inside_brackets:
spl = leaf.prefix.split("#")
if "\\" not in spl[0]:
nl_count = spl[-1].count("\n")
if len(spl) > 1:
nl_count -= 1
leaf.prefix = "\n" * nl_count
return
leaf.prefix = ""
def normalize_invisible_parens(
node: Node, parens_after: Set[str], *, preview: bool
) -> None:
"""Make existing optional parentheses invisible or create new ones.
`parens_after` is a set of string leaf values immediately after which parens
should be put.
Standardizes on visible parentheses for single-element tuples, and keeps
existing visible parentheses for other tuples and generator expressions.
"""
for pc in list_comments(node.prefix, is_endmarker=False, preview=preview):
if pc.value in FMT_OFF:
# This `node` has a prefix with `# fmt: off`, don't mess with parens.
return
check_lpar = False
for index, child in enumerate(list(node.children)):
# Fixes a bug where invisible parens are not properly stripped from
# assignment statements that contain type annotations.
if isinstance(child, Node) and child.type == syms.annassign:
normalize_invisible_parens(
child, parens_after=parens_after, preview=preview
)
# Add parentheses around long tuple unpacking in assignments.
if (
index == 0
and isinstance(child, Node)
and child.type == syms.testlist_star_expr
):
check_lpar = True
if check_lpar:
if (
preview
and child.type == syms.atom
and node.type == syms.for_stmt
and isinstance(child.prev_sibling, Leaf)
and child.prev_sibling.type == token.NAME
and child.prev_sibling.value == "for"
):
if maybe_make_parens_invisible_in_atom(
child,
parent=node,
remove_brackets_around_comma=True,
):
wrap_in_parentheses(node, child, visible=False)
elif preview and isinstance(child, Node) and node.type == syms.with_stmt:
remove_with_parens(child, node)
elif child.type == syms.atom:
if maybe_make_parens_invisible_in_atom(
child,
parent=node,
):
wrap_in_parentheses(node, child, visible=False)
elif is_one_tuple(child):
wrap_in_parentheses(node, child, visible=True)
elif node.type == syms.import_from:
# "import from" nodes store parentheses directly as part of
# the statement
if is_lpar_token(child):
assert is_rpar_token(node.children[-1])
# make parentheses invisible
child.value = ""
node.children[-1].value = ""
elif child.type != token.STAR:
# insert invisible parentheses
node.insert_child(index, Leaf(token.LPAR, ""))
node.append_child(Leaf(token.RPAR, ""))
break
elif (
index == 1
and child.type == token.STAR
and node.type == syms.except_clause
):
# In except* (PEP 654), the star is actually part of
# the keyword. So we need to skip the insertion of
# invisible parentheses to work more precisely.
continue
elif not (isinstance(child, Leaf) and is_multiline_string(child)):
wrap_in_parentheses(node, child, visible=False)
comma_check = child.type == token.COMMA if preview else False
check_lpar = isinstance(child, Leaf) and (
child.value in parens_after or comma_check
)
def remove_await_parens(node: Node) -> None:
if node.children[0].type == token.AWAIT and len(node.children) > 1:
if (
node.children[1].type == syms.atom
and node.children[1].children[0].type == token.LPAR
):
if maybe_make_parens_invisible_in_atom(
node.children[1],
parent=node,
remove_brackets_around_comma=True,
):
wrap_in_parentheses(node, node.children[1], visible=False)
# Since await is an expression we shouldn't remove
# brackets in cases where this would change
# the AST due to operator precedence.
# Therefore we only aim to remove brackets around
# power nodes that aren't also await expressions themselves.
# https://peps.python.org/pep-0492/#updated-operator-precedence-table
# N.B. We've still removed any redundant nested brackets though :)
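# Illustrative example: `await (fetch())` can lose its redundant parentheses,
# while `await (x + y)` keeps them because `await x + y` would bind as
# `(await x) + y`.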
opening_bracket = cast(Leaf, node.children[1].children[0])
closing_bracket = cast(Leaf, node.children[1].children[-1])
bracket_contents = cast(Node, node.children[1].children[1])
if bracket_contents.type != syms.power:
ensure_visible(opening_bracket)
ensure_visible(closing_bracket)
elif (
bracket_contents.type == syms.power
and bracket_contents.children[0].type == token.AWAIT
):
ensure_visible(opening_bracket)
ensure_visible(closing_bracket)
# If we are in a nested await then recurse down.
remove_await_parens(bracket_contents)
def remove_with_parens(node: Node, parent: Node) -> None:
"""Recursively hide optional parens in `with` statements."""
# Removing all unnecessary parentheses in with statements in one pass is a tad
# complex as different variations of bracketed statements result in pretty
# different parse trees: