Commit

Merge pull request #474 from marcelocenerine/trim_includeNewLine
Add methods for dealing with whitespaces
olafurpg authored Dec 19, 2017
2 parents 8bd026d + 47b94c0 commit 3f63b5b
Showing 4 changed files with 98 additions and 17 deletions.
11 changes: 7 additions & 4 deletions project/Mima.scala
@@ -24,11 +24,14 @@ object Mima {
       ProblemFilters.exclude[DirectMissingMethodProblem](
         "scalafix.rule.RuleCtx.printLintMessage"),
       ProblemFilters.exclude[ReversedMissingMethodProblem](
-        "scalafix.rule.RuleCtx.filterLintMessage"
-      ),
+        "scalafix.rule.RuleCtx.filterLintMessage"),
       ProblemFilters.exclude[DirectMissingMethodProblem](
-        "scalafix.cli.CliRunner.this"
-      )
+        "scalafix.cli.CliRunner.this"),
+      ProblemFilters.exclude[FinalClassProblem]("scalafix.util.TokenList"),
+      ProblemFilters.exclude[ReversedMissingMethodProblem](
+        "scalafix.util.TokenList.leadingSpaces"),
+      ProblemFilters.exclude[ReversedMissingMethodProblem](
+        "scalafix.util.TokenList.trailingSpaces")
     )
   }
 }
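
How these exclusions are consumed is outside this diff, but for orientation: sbt-mima-plugin reads such ProblemFilters from its mimaBinaryIssueFilters setting. A minimal sketch, assuming the list maintained in project/Mima.scala is exposed under a value named ignoredABIProblems (that name is an assumption, not shown in this diff):

// build.sbt (hypothetical wiring; the value name is assumed for illustration)
mimaBinaryIssueFilters ++= Mima.ignoredABIProblems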

@@ -32,7 +32,7 @@ case class RuleCtxImpl(tree: Tree, config: ScalafixConfig) extends RuleCtx {
   override def toString: String = syntax
   def toks(t: Tree): Tokens = t.tokens(config.dialect)
   lazy val tokens: Tokens = tree.tokens(config.dialect)
-  lazy val tokenList: TokenList = new TokenList(tokens)
+  lazy val tokenList: TokenList = TokenList(tokens)
   lazy val matchingParens: MatchingParens = MatchingParens(tokens)
   lazy val comments: AssociatedComments = AssociatedComments(tokens)
   lazy val input: Input = tokens.head.input

@@ -6,7 +6,7 @@ import scala.meta.tokens.Token
 import scala.meta.tokens.Tokens
 
 /** Helper to traverse tokens as a doubly linked list. */
-class TokenList(tokens: Tokens) {
+final class TokenList private (tokens: Tokens) {
   def trailing(token: Token): SeqView[Token, IndexedSeq[Token]] =
     tokens.view(tok2idx(token), tokens.length).drop(1)
   def leading(token: Token): SeqView[Token, IndexedSeq[Token]] =

@@ -59,4 +59,13 @@ class TokenList(tokens: Tokens) {
     }
   }
 
+  def leadingSpaces(token: Token): SeqView[Token, IndexedSeq[Token]] =
+    leading(token).takeWhile(_.is[Token.Space])
+
+  def trailingSpaces(token: Token): SeqView[Token, IndexedSeq[Token]] =
+    trailing(token).takeWhile(_.is[Token.Space])
 }
+
+object TokenList {
+  def apply(tokens: Tokens): TokenList = new TokenList(tokens)
+}
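
To make the new API concrete, here is a small self-contained sketch (illustrative only, not part of this diff) that exercises leadingSpaces and trailingSpaces the same way the updated tests below do. The object name and input string are made up; it assumes the standard scala.meta imports used elsewhere in the project.

import scala.meta._
import scalafix.util.TokenList

// Illustrative sketch (not part of this commit).
object WhitespaceExample extends App {
  val tokens = "val x = 1".tokenize.get
  val tokenList = TokenList(tokens)
  val Some(equals) = tokens.find(_.is[Token.Equals])

  // "val x = 1" has exactly one space on each side of `=`.
  println(tokenList.leadingSpaces(equals).size)  // prints 1
  println(tokenList.trailingSpaces(equals).size) // prints 1
}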

@@ -6,21 +6,90 @@ import scalafix.util.TokenList
 
 class TokenListTest extends FunSuite {
 
-  val tokens = "".tokenize.get // this contains two tokens: beginningOfFile and endOfFile
-  val firstToken = tokens.head
-  val lastToken = tokens.last
-  val tokenList = new TokenList(tokens)
+  val emptyFileTokens = "".tokenize.get // this contains two tokens: BOF and EOF
+  val nonEmptyFileTokens =
+    """package foo
+      |
+      |object Bar {
+      | val baz   =   10
+      |}
+    """.stripMargin.tokenize.get
 
-  test("Prev returns the firstToken when is given the firstToken") {
-    assert(tokenList.prev(firstToken) == firstToken)
-    assert(tokenList.prev(tokens.last) == firstToken)
+  test("prev returns the preceding token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.prev(emptyFileTokens.last) == emptyFileTokens.head)
   }
 
-  test("Slice gives an empty list with same token as inputs") {
-    assert(tokenList.slice(firstToken, firstToken) == Seq())
+  test("prev returns self if there is no preceding token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.prev(emptyFileTokens.head) == emptyFileTokens.head)
   }
 
-  test("Next breaks gives the lastToken if it is given lastToken") {
-    assert(tokenList.next(lastToken) == lastToken)
+  test("next returns the following token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.next(emptyFileTokens.head) == emptyFileTokens.last)
   }
+
+  test("next returns self if there is no following token") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.next(emptyFileTokens.last) == emptyFileTokens.last)
+  }
+
+  test("slice returns an empty seq if there is no token in between") {
+    val tokenList = TokenList(emptyFileTokens)
+    assert(tokenList.slice(emptyFileTokens.head, emptyFileTokens.head) == Seq())
+  }
+
+  test("slice returns all tokens between from/to tokens") {
+    val Some(kwObject) = nonEmptyFileTokens.find(_.is[Token.KwObject])
+    val Some(leftBrace) = nonEmptyFileTokens.find(_.is[Token.LeftBrace])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    val slice = tokenList.slice(kwObject, leftBrace)
+    assert(slice.size == 3)
+    val Seq(space1, bar, space2) = slice
+    assert(space1.is[Token.Space])
+    assert(bar.syntax.equals("Bar"))
+    assert(space2.is[Token.Space])
+  }
+
+  test("leadingSpaces returns all spaces preceding a token") {
+    val Some(equals) = nonEmptyFileTokens.find(_.is[Token.Equals])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    val spaces = tokenList.leadingSpaces(equals)
+    assert(spaces.size == 3)
+    val Seq(s1, s2, s3) = spaces
+    assert(s1 == tokenList.prev(equals))
+    assert(s2 == tokenList.prev(s1))
+    assert(s3 == tokenList.prev(s2))
+  }
+
+  test(
+    "leadingSpaces returns an empty seq if there are no spaces preceding a token") {
+    val Some(kwPackage) = nonEmptyFileTokens.find(_.is[Token.KwPackage])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    assert(tokenList.leadingSpaces(kwPackage) == Seq())
+  }
+
+  test("trailingSpaces returns all spaces following a token") {
+    val Some(equals) = nonEmptyFileTokens.find(_.is[Token.Equals])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    val spaces = tokenList.trailingSpaces(equals)
+    assert(spaces.size == 3)
+    val Seq(s1, s2, s3) = spaces
+    assert(s1 == tokenList.next(equals))
+    assert(s2 == tokenList.next(s1))
+    assert(s3 == tokenList.next(s2))
+  }
+
+  test(
+    "trailingSpaces returns an empty seq if there are no spaces following a token") {
+    val Some(rightBrace) = nonEmptyFileTokens.find(_.is[Token.RightBrace])
+    val tokenList = TokenList(nonEmptyFileTokens)
+
+    assert(tokenList.trailingSpaces(rightBrace) == Seq())
+  }
 }
