State: optionally apply indents to alt mod
kitbellew committed Nov 23, 2024
1 parent a1f3aa9 commit d423741
Showing 10 changed files with 80 additions and 77 deletions.
@@ -13,6 +13,7 @@ case class ModExt(
mod: Modification,
indents: Seq[Indent] = Nil,
altOpt: Option[ModExt] = None,
noAltIndent: Boolean = false,
) {
@inline
def isNL: Boolean = mod.isNL
@@ -27,18 +28,20 @@ case class ModExt(

altOpt match {
case None => res("")
case Some(x) => x.toString(res("|"), "+")
case Some(x) => x.toString(res("|"), if (noAltIndent) "" else "+")
}
}

override def toString: String = toString("", "")

def withAlt(alt: ModExt): ModExt =
if (altOpt.contains(alt)) this else copy(altOpt = Some(alt))
def withAlt(alt: => ModExt, noAltIndent: Boolean = false): ModExt =
copy(altOpt = Some(alt), noAltIndent = noAltIndent)

@inline
def withAltIf(ok: Boolean)(alt: => ModExt): ModExt =
if (ok) withAlt(alt) else this
def withAltIf(
ok: Boolean,
)(alt: => ModExt, noAltIndent: Boolean = false): ModExt =
if (ok) withAlt(alt, noAltIndent = noAltIndent) else this

def orMod(flag: Boolean, mod: => ModExt): ModExt = if (flag) this else mod

@@ -1951,7 +1951,10 @@ class Router(formatOps: FormatOps) {
case Newlines.fold =>
def nlSplitBase(cost: Int, policy: Policy = NoPolicy)(implicit
fileLine: FileLine,
) = Split(Newline.withAlt(modSpace), cost, policy = policy)
) = {
val nlMod = Newline.withAlt(modSpace, noAltIndent = true)
Split(nlMod, cost, policy = policy)
}
if (nextDotIfSig.isEmpty)
if (nlOnly) Seq(nlSplitBase(0))
else {
@@ -85,6 +85,7 @@ final class State(
initialModExt.altOpt.flatMap { alt =>
if (tok.left.is[T.Comment]) None
else if (nextIndent < alt.mod.length + column) None
else if (initialModExt.noAltIndent) Some(alt)
else Some(alt.withIndents(initialModExt.indents))
}.fold((initialNextSplit, nextIndent, nextPushes)) { alt =>
val altPushes = getUnexpired(alt, pushes)
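Restated as a small sketch (identifiers follow the hunk above; this is an illustration of the new branch, not code taken from the commit): with noAltIndent set, the chosen alternative keeps only its own indents, whereas previously every alternative inherited the primary modification's indents.

  // Once a usable alternative `alt` has been found for `initialModExt`:
  val chosen: ModExt =
    if (initialModExt.noAltIndent) alt                      // new: keep alt's own indents
    else alt.withIndents(initialModExt.indents)             // unchanged prior behaviour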
@@ -13,7 +13,7 @@ abstract class CommunityIntellijScalaSuite(name: String)
class CommunityIntellijScala_2024_2_Suite
extends CommunityIntellijScalaSuite("intellij-scala-2024.2") {

override protected def totalStatesVisited: Option[Int] = Some(57957808)
override protected def totalStatesVisited: Option[Int] = Some(59338368)

override protected def builds = Seq(getBuild(
"2024.2.28",
@@ -51,7 +51,7 @@ class CommunityIntellijScala_2024_2_Suite
class CommunityIntellijScala_2024_3_Suite
extends CommunityIntellijScalaSuite("intellij-scala-2024.3") {

override protected def totalStatesVisited: Option[Int] = Some(58174471)
override protected def totalStatesVisited: Option[Int] = Some(59555031)

override protected def builds = Seq(getBuild(
"2024.3.4",
@@ -9,7 +9,7 @@ abstract class CommunityScala2Suite(name: String)

class CommunityScala2_12Suite extends CommunityScala2Suite("scala-2.12") {

override protected def totalStatesVisited: Option[Int] = Some(42560182)
override protected def totalStatesVisited: Option[Int] = Some(42554938)

override protected def builds =
Seq(getBuild("v2.12.20", dialects.Scala212, 1277))
@@ -18,7 +18,7 @@ class CommunityScala2_12Suite extends CommunityScala2Suite("scala-2.12") {

class CommunityScala2_13Suite extends CommunityScala2Suite("scala-2.13") {

override protected def totalStatesVisited: Option[Int] = Some(53060139)
override protected def totalStatesVisited: Option[Int] = Some(53041273)

override protected def builds =
Seq(getBuild("v2.13.14", dialects.Scala213, 1287))
@@ -9,15 +9,15 @@ abstract class CommunityScala3Suite(name: String)

class CommunityScala3_2Suite extends CommunityScala3Suite("scala-3.2") {

override protected def totalStatesVisited: Option[Int] = Some(39246875)
override protected def totalStatesVisited: Option[Int] = Some(39229069)

override protected def builds = Seq(getBuild("3.2.2", dialects.Scala32, 791))

}

class CommunityScala3_3Suite extends CommunityScala3Suite("scala-3.3") {

override protected def totalStatesVisited: Option[Int] = Some(42432863)
override protected def totalStatesVisited: Option[Int] = Some(42414755)

override protected def builds = Seq(getBuild("3.3.3", dialects.Scala33, 861))

@@ -9,7 +9,7 @@ abstract class CommunitySparkSuite(name: String)

class CommunitySpark3_4Suite extends CommunitySparkSuite("spark-3.4") {

override protected def totalStatesVisited: Option[Int] = Some(86399848)
override protected def totalStatesVisited: Option[Int] = Some(86438964)

override protected def builds =
Seq(getBuild("v3.4.1", dialects.Scala213, 2585))
@@ -18,7 +18,7 @@ class CommunitySpark3_4Suite extends CommunitySparkSuite("spark-3.4") {

class CommunitySpark3_5Suite extends CommunitySparkSuite("spark-3.5") {

override protected def totalStatesVisited: Option[Int] = Some(91404596)
override protected def totalStatesVisited: Option[Int] = Some(91445316)

override protected def builds =
Seq(getBuild("v3.5.3", dialects.Scala213, 2756))
108 changes: 52 additions & 56 deletions scalafmt-tests/shared/src/test/resources/newlines/source_fold.stat
@@ -8430,7 +8430,7 @@ object a {
)
)
}
>>> { stateVisits = 4769, stateVisits2 = 4769 }
>>> { stateVisits = 3882, stateVisits2 = 3882 }
object a {
div(cls := "cover")(
div(cls := "doc")(bodyContents),
@@ -9558,61 +9558,57 @@ object a {
.map(_.filterNot(_.getCanonicalPath.contains("SSLOptions")))
}
}
>>> { stateVisits = 3668, stateVisits2 = 3668 }
>>> { stateVisits = 52849, stateVisits2 = 52849 }
object a {
private def ignoreUndocumentedPackages(
packages: Seq[Seq[File]]
): Seq[Seq[File]] = {
packages.map(_.filterNot(_.getName.contains("$")))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/deploy")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/examples")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/internal")
))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/memory")))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/rpc")))
.map(_.filterNot(f =>
f.getCanonicalPath.contains("org/apache/spark/shuffle") &&
!f.getCanonicalPath.contains("org/apache/spark/shuffle/api")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/executor")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/ExecutorAllocationClient")
)).map(_.filterNot(_.getCanonicalPath.contains(
"org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend"
))).map(_.filterNot(f =>
f.getCanonicalPath.contains("org/apache/spark/unsafe") &&
!f.getCanonicalPath
.contains("org/apache/spark/unsafe/types/CalendarInterval")
)).map(_.filterNot(_.getCanonicalPath.contains("python")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/util/collection")
))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/io")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/util/kvstore")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/connect/")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/execution")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/internal")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/hive")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")
))
.map(_.filterNot(_.getCanonicalPath.contains("org.apache.spark.errors")))
.map(_.filterNot(
_.getCanonicalPath.contains("org.apache.spark.sql.errors")
)).map(_.filterNot(_.getCanonicalPath.contains("org/apache/hive")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")
)).map(_.filterNot(_.getCanonicalPath.contains("SSLOptions")))
packages.map(_.filterNot(_.getName.contains("$"))).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/deploy")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/examples")
))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/internal")))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/memory")))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/rpc")))
.map(_.filterNot(f =>
f.getCanonicalPath.contains("org/apache/spark/shuffle") &&
!f.getCanonicalPath.contains("org/apache/spark/shuffle/api")
))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/executor")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/ExecutorAllocationClient")
)).map(_.filterNot(_.getCanonicalPath.contains(
"org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend"
))).map(_.filterNot(f =>
f.getCanonicalPath.contains("org/apache/spark/unsafe") &&
!f.getCanonicalPath
.contains("org/apache/spark/unsafe/types/CalendarInterval")
)).map(_.filterNot(_.getCanonicalPath.contains("python"))).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/util/collection")
)).map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/io")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/util/kvstore")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/connect/")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/execution")
)).map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/internal")
))
.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")
)).map(_.filterNot(_.getCanonicalPath.contains("org.apache.spark.errors")))
.map(_.filterNot(
_.getCanonicalPath.contains("org.apache.spark.sql.errors")
)).map(_.filterNot(_.getCanonicalPath.contains("org/apache/hive")))
.map(_.filterNot(
_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")
)).map(_.filterNot(_.getCanonicalPath.contains("SSLOptions")))
}
}
<<< #4133 spark: massive chain of applies with function calls
@@ -10069,7 +10065,7 @@ system.dynamicAccess.createInstanceFor[Serializer](fqn, Nil).recoverWith {
}
}
}
>>> { stateVisits = 4820, stateVisits2 = 4820 }
>>> { stateVisits = 3689, stateVisits2 = 3689 }
system.dynamicAccess.createInstanceFor[Serializer](fqn, Nil).recoverWith {
case _: NoSuchMethodException => system.dynamicAccess
.createInstanceFor[Serializer](
@@ -10512,9 +10508,9 @@ object a {
// c1
baz
}.qux { x =>
// c2
quux(x)
}
// c2
quux(x)
}
}
<<< chained maps with multi-line blocks, with braces-to-parens not OK
object a {
@@ -6702,12 +6702,12 @@ object a {
x8: Any, x9: Any, x10: Any, x11: Any, x12: Any, x13: Any,
x14: Any, x15: Any, x16: Any, x17: Any, x18: Any) =>
g.asInstanceOf[
Tuple18[
_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _,
_] => Any].apply(
(
x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13,
x14, x15, x16, x17, x18))).asInstanceOf[F])
Tuple18[
_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _,
_] => Any].apply(
(
x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13,
x14, x15, x16, x17, x18))).asInstanceOf[F])
}
<<< #4133 binpack overflow assignment with selects, attributes, !dangling
indent.callSite = 4
@@ -144,7 +144,7 @@ class FormatTests extends FunSuite with CanRunTests with FormatAssertions {
val explored = Debug.explored.get()
logger.debug(s"Total explored: $explored")
if (!onlyUnit && !onlyManual)
assertEquals(explored, 1089468, "total explored")
assertEquals(explored, 1183844, "total explored")
val results = debugResults.result()
// TODO(olafur) don't block printing out test results.
// I don't want to deal with scalaz's Tasks :'(