Export diagnostics (including unused warnings) to SemanticDB #17835

Merged · 30 commits · Oct 19, 2023

Commits
75e6bb6
Export diagnostics (including unused warnings) to SemanticDB
tanishiking Jun 5, 2023
c5a80c4
Add -Wunused:all to the compiler options to see Semanticdb export unused
tanishiking Jun 5, 2023
e1d0ec9
Add some comments
tanishiking Jun 5, 2023
f17fb5f
ExtractSemanticDB.PostInlining now update the .semanticdb file on disk
tanishiking Jun 27, 2023
d2b54c2
Parallelize read/write SemanticDB
tanishiking Jul 19, 2023
56c5909
Run AppendDiagnostics phase after crossVersionChecks
tanishiking Jul 19, 2023
50e0fd2
[WIP] Don't parse TextDocuments in AppendDiagnostics
tanishiking Jul 20, 2023
2babbc4
Remove unnecessary suffix
tanishiking Jul 20, 2023
5186b62
Remove unused local + Context
tanishiking Jul 20, 2023
66a5306
Merge branch 'main' into export-diagnostics
tanishiking Jul 20, 2023
a414fae
Fix presentation compiler
tanishiking Jul 20, 2023
d5065ec
Parse TextDocuments instead of using `com.google.protobuf`
tanishiking Jul 25, 2023
d7258b4
Remove Context from relPath and so on
tanishiking Jul 25, 2023
4b5d3e7
Remove from parallel
tanishiking Jul 25, 2023
3008ed8
Remove ancii color code from SemanticDB
tanishiking Jul 25, 2023
c9de8e2
Extractor.extract uses unitCtx
tanishiking Jul 26, 2023
40a2a00
Remove Context parameter from Scala3.range and toSemanticDbDiagnositcs
tanishiking Jul 26, 2023
7b29f4c
Remove unused
tanishiking Jul 27, 2023
4f6a092
Revert "Remove from parallel"
tanishiking Jul 27, 2023
a6dfec2
Merge branch 'main' into export-diagnostics
tanishiking Jul 27, 2023
3daeaa6
Merge branch 'main' into export-diagnostics
tanishiking Sep 21, 2023
499c347
Remove println
tanishiking Sep 21, 2023
71a5cd0
Fix TastyBootstrapTests by adding sharable annotation to regex
tanishiking Sep 22, 2023
b28b425
Merge branch 'main' into export-diagnostics
tanishiking Oct 16, 2023
d54d8a2
Do not run toSemanticDiagnosic in parallel
tanishiking Oct 16, 2023
b8565a0
Fix wrong rebase
tanishiking Oct 16, 2023
2857632
Merge branch 'main' into export-diagnostics
tanishiking Oct 19, 2023
053e644
Restore removed comment
tanishiking Oct 19, 2023
567d486
Fix writesToOutputDir
tanishiking Oct 19, 2023
275e6fa
Uncomment the phase for appending warnings
tanishiking Oct 19, 2023
3 changes: 2 additions & 1 deletion compiler/src/dotty/tools/dotc/Compiler.scala
@@ -39,7 +39,7 @@ class Compiler {
List(new CheckShadowing) :: // Check shadowing elements
List(new YCheckPositions) :: // YCheck positions
List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks
List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files
List(new semanticdb.ExtractSemanticDB.ExtractSemanticInfo) :: // Extract info and attach to the tree of the unit file
List(new PostTyper) :: // Additional checks and cleanups after type checking
List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only)
List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks
@@ -72,6 +72,7 @@ class Compiler {
new ExpandSAMs, // Expand single abstract method closures to anonymous classes
new ElimRepeated, // Rewrite vararg parameters and arguments
new RefChecks) :: // Various checks mostly related to abstract members and overriding
List(new semanticdb.ExtractSemanticDB.AppendDiagnostics) :: // Attach warnings to extracted SemanticDB and write to .semanticdb file
List(new init.Checker) :: // Check initialization of objects
List(new ProtectedAccessors, // Add accessors for protected members
new ExtensionMethods, // Expand methods of value classes with extension methods
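The single extractSemanticDB phase is thus split into two scheduled phases. Going by the naming scheme introduced further down in this PR (phaseNamePrefix plus the PhaseMode's toString), the two entries get derived runtime names; a standalone sketch of that scheme, not compiler code:

    // Standalone sketch of the phase-naming scheme used by this PR:
    // the runtime phase name is phaseNamePrefix ++ the mode's toString.
    enum PhaseMode:
      case ExtractSemanticInfo, AppendDiagnostics

    val phaseNamePrefix = "extractSemanticDB"
    val phaseNames = PhaseMode.values.toList.map(m => phaseNamePrefix + m.toString)
    // List(extractSemanticDBExtractSemanticInfo, extractSemanticDBAppendDiagnostics)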
21 changes: 16 additions & 5 deletions compiler/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -109,9 +109,14 @@ abstract class Reporter extends interfaces.ReporterResult {

private var errors: List[Error] = Nil

private var warnings: List[Warning] = Nil

/** All errors reported by this reporter (ignoring outer reporters) */
def allErrors: List[Error] = errors

/** All warnings reported by this reporter (ignoring outer reporters) */
def allWarnings: List[Warning] = warnings

/** Were sticky errors reported? Overridden in StoreReporter. */
def hasStickyErrors: Boolean = false

@@ -149,11 +154,17 @@ abstract class Reporter extends interfaces.ReporterResult {
val key = w.enablingOption.name
addUnreported(key, 1)
case _ =>
if !isHidden(dia) then // avoid isHidden test for summarized warnings so that message is not forced
markReported(dia)
withMode(Mode.Printing)(doReport(dia))
dia match {
case _: Warning => _warningCount += 1
// conditional warnings that are not enabled are not fatal
val d = dia match
case w: Warning if ctx.settings.XfatalWarnings.value => w.toError
case _ => dia
if !isHidden(d) then // avoid isHidden test for summarized warnings so that message is not forced
markReported(d)
withMode(Mode.Printing)(doReport(d))
d match {
case w: Warning =>
warnings = w :: warnings
_warningCount += 1
case e: Error =>
errors = e :: errors
_errorCount += 1
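The reporter now retains warnings alongside errors, so a later phase can pull them back out. As a rough sketch (the helper name warningsBySource is illustrative, not part of the diff), grouping them by source file is all the AppendDiagnostics phase needs:

    import dotty.tools.dotc.core.Contexts.{Context, ctx}
    import dotty.tools.dotc.reporting.Diagnostic.Warning
    import dotty.tools.dotc.util.SourceFile

    // Illustrative helper: group every warning retained by the reporter by its
    // source file, mirroring what ExtractSemanticDB.AppendDiagnostics does below.
    def warningsBySource(using Context): Map[SourceFile, List[Warning]] =
      ctx.reporter.allWarnings.groupBy(_.pos.source)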
23 changes: 23 additions & 0 deletions compiler/src/dotty/tools/dotc/semanticdb/DiagnosticOps.scala
@@ -0,0 +1,23 @@
package dotty.tools.dotc.semanticdb

import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.{semanticdb => s}
import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING}
import dotty.tools.dotc.core.Contexts.Context
import scala.annotation.internal.sharable

object DiagnosticOps:
@sharable private val asciiColorCodes = "\u001B\\[[;\\d]*m".r
extension (d: Diagnostic)
def toSemanticDiagnostic: s.Diagnostic =
val severity = d.level match
case ERROR => s.Diagnostic.Severity.ERROR
case WARNING => s.Diagnostic.Severity.WARNING
case INFO => s.Diagnostic.Severity.INFORMATION
case _ => s.Diagnostic.Severity.INFORMATION
val msg = asciiColorCodes.replaceAllIn(d.msg.message, m => "")
s.Diagnostic(
range = Scala3.range(d.pos.span, d.pos.source),
severity = severity,
message = msg
)
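A small usage sketch for the new extension (the diagnostics here are assumed to come from the reporter, as in the phase below): the conversion maps the severity and strips ANSI color escapes from the message.

    import dotty.tools.dotc.reporting.Diagnostic
    import dotty.tools.dotc.{semanticdb => s}
    import dotty.tools.dotc.semanticdb.DiagnosticOps.*

    // Convert compiler diagnostics to their SemanticDB counterparts.
    def toProto(ds: List[Diagnostic]): List[s.Diagnostic] =
      ds.map(_.toSemanticDiagnostic)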
212 changes: 151 additions & 61 deletions compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
@@ -21,20 +21,34 @@ import transform.SymUtils._

import scala.collection.mutable
import scala.annotation.{ threadUnsafe => tu, tailrec }
import scala.jdk.CollectionConverters._
import scala.PartialFunction.condOpt
import typer.ImportInfo.withRootImports

import dotty.tools.dotc.{semanticdb => s}
import dotty.tools.io.{AbstractFile, JarArchive}
import dotty.tools.dotc.semanticdb.DiagnosticOps.*
import scala.util.{Using, Failure, Success}


/** Extract symbol references and uses to semanticdb files.
* See https://scalameta.org/docs/semanticdb/specification.html#symbol-1
* for a description of the format.
* TODO: Also extract type information
*
* Here, we define two phases for "ExtractSemanticDB", "PostTyper" and "PostInlining".
*
* The "PostTyper" phase extracts SemanticDB information such as symbol
* definitions, symbol occurrences, type information, and synthetics
* and write .semanticdb file.
*
* The "PostInlining" phase extracts diagnostics from "ctx.reporter" and
* attaches them to the SemanticDB information extracted in the "PostTyper" phase.
* We need to run this phase after the "CheckUnused.PostInlining" phase
* so that we can extract the warnings generated by "-Wunused".
*/
class ExtractSemanticDB extends Phase:
import Scala3.{_, given}
class ExtractSemanticDB private (phaseMode: ExtractSemanticDB.PhaseMode) extends Phase:

override val phaseName: String = ExtractSemanticDB.name
override val phaseName: String = ExtractSemanticDB.phaseNamePrefix + phaseMode.toString()

override val description: String = ExtractSemanticDB.description

@@ -46,14 +60,140 @@ class ExtractSemanticDB extends Phase:
// Check not needed since it does not transform trees
override def isCheckable: Boolean = false

override def run(using Context): Unit =
val unit = ctx.compilationUnit
val extractor = Extractor()
extractor.extract(unit.tpdTree)
ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList)
override def runOn(units: List[CompilationUnit])(using ctx: Context): List[CompilationUnit] = {
val sourceRoot = ctx.settings.sourceroot.value
val appendDiagnostics = phaseMode == ExtractSemanticDB.PhaseMode.AppendDiagnostics
if (appendDiagnostics)
val warnings = ctx.reporter.allWarnings.groupBy(w => w.pos.source)
units.flatMap { unit =>
warnings.get(unit.source).map { ws =>
val unitCtx = ctx.fresh.setCompilationUnit(unit).withRootImports
val outputDir =
ExtractSemanticDB.semanticdbPath(
unit.source,
ExtractSemanticDB.outputDirectory(using unitCtx),
sourceRoot
)
(outputDir, ws.map(_.toSemanticDiagnostic))
}
}.asJava.parallelStream().forEach { case (out, warnings) =>
ExtractSemanticDB.appendDiagnostics(warnings, out)
}
else
val writeSemanticdbText = ctx.settings.semanticdbText.value
units.foreach { unit =>
val unitCtx = ctx.fresh.setCompilationUnit(unit).withRootImports
val outputDir =
ExtractSemanticDB.semanticdbPath(
unit.source,
ExtractSemanticDB.outputDirectory(using unitCtx),
sourceRoot
)
val extractor = ExtractSemanticDB.Extractor()
extractor.extract(unit.tpdTree)(using unitCtx)
ExtractSemanticDB.write(
unit.source,
extractor.occurrences.toList,
extractor.symbolInfos.toList,
extractor.synthetics.toList,
outputDir,
sourceRoot,
writeSemanticdbText
)
}
units
}

def run(using Context): Unit = unsupported("run")
end ExtractSemanticDB

object ExtractSemanticDB:
import java.nio.file.Path
import java.nio.file.Files
import java.nio.file.Paths

val phaseNamePrefix: String = "extractSemanticDB"
val description: String = "extract info into .semanticdb files"

enum PhaseMode:
case ExtractSemanticInfo
case AppendDiagnostics

class ExtractSemanticInfo extends ExtractSemanticDB(PhaseMode.ExtractSemanticInfo)

class AppendDiagnostics extends ExtractSemanticDB(PhaseMode.AppendDiagnostics)

private def semanticdbTarget(using Context): Option[Path] =
Option(ctx.settings.semanticdbTarget.value)
.filterNot(_.isEmpty)
.map(Paths.get(_))

private def outputDirectory(using Context): Path =
semanticdbTarget.getOrElse(ctx.settings.outputDir.value.jpath)

private def absolutePath(path: Path): Path = path.toAbsolutePath.normalize

private def write(
source: SourceFile,
occurrences: List[SymbolOccurrence],
symbolInfos: List[SymbolInformation],
synthetics: List[Synthetic],
outpath: Path,
sourceRoot: String,
semanticdbText: Boolean
): Unit =
Files.createDirectories(outpath.getParent())
val doc: TextDocument = TextDocument(
schema = Schema.SEMANTICDB4,
language = Language.SCALA,
uri = Tools.mkURIstring(Paths.get(relPath(source, sourceRoot))),
text = if semanticdbText then String(source.content) else "",
md5 = internal.MD5.compute(String(source.content)),
symbols = symbolInfos,
occurrences = occurrences,
synthetics = synthetics,
)
val docs = TextDocuments(List(doc))
val out = Files.newOutputStream(outpath)
try
val stream = internal.SemanticdbOutputStream.newInstance(out)
docs.writeTo(stream)
stream.flush()
finally
out.close()
end write

private def appendDiagnostics(
diagnostics: Seq[Diagnostic],
outpath: Path
): Unit =
Using.Manager { use =>
val in = use(Files.newInputStream(outpath))
val sin = internal.SemanticdbInputStream.newInstance(in)
val docs = TextDocuments.parseFrom(sin)

val out = use(Files.newOutputStream(outpath))
val sout = internal.SemanticdbOutputStream.newInstance(out)
TextDocuments(docs.documents.map(_.withDiagnostics(diagnostics))).writeTo(sout)
sout.flush()
} match
case Failure(ex) => // failed somehow, should we say something?
case Success(_) => // success to update semanticdb, say nothing
end appendDiagnostics

private def relPath(source: SourceFile, sourceRoot: String) =
SourceFile.relativePath(source, sourceRoot)

private def semanticdbPath(source: SourceFile, base: Path, sourceRoot: String): Path =
absolutePath(base)
.resolve("META-INF")
.resolve("semanticdb")
.resolve(relPath(source, sourceRoot))
.resolveSibling(source.name + ".semanticdb")

/** Extractor of symbol occurrences from trees */
class Extractor extends TreeTraverser:
import Scala3.{_, given}
given s.SemanticSymbolBuilder = s.SemanticSymbolBuilder()
val synth = SyntheticsExtractor()
given converter: s.TypeOps = s.TypeOps()
@@ -465,55 +605,5 @@
registerSymbol(vparam.symbol, symkinds)
traverse(vparam.tpt)
tparams.foreach(tp => traverse(tp.rhs))


object ExtractSemanticDB:
import java.nio.file.Path
import java.nio.file.Files
import java.nio.file.Paths

val name: String = "extractSemanticDB"
val description: String = "extract info into .semanticdb files"

private def semanticdbTarget(using Context): Option[Path] =
Option(ctx.settings.semanticdbTarget.value)
.filterNot(_.isEmpty)
.map(Paths.get(_))

private def semanticdbText(using Context): Boolean =
ctx.settings.semanticdbText.value

private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value

def write(
source: SourceFile,
occurrences: List[SymbolOccurrence],
symbolInfos: List[SymbolInformation],
synthetics: List[Synthetic],
)(using Context): Unit =
def absolutePath(path: Path): Path = path.toAbsolutePath.normalize
val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value)
val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath))
.resolve("META-INF")
.resolve("semanticdb")
.resolve(relPath)
.resolveSibling(source.name + ".semanticdb")
Files.createDirectories(outpath.getParent())
val doc: TextDocument = TextDocument(
schema = Schema.SEMANTICDB4,
language = Language.SCALA,
uri = Tools.mkURIstring(Paths.get(relPath)),
text = if semanticdbText then String(source.content) else "",
md5 = internal.MD5.compute(String(source.content)),
symbols = symbolInfos,
occurrences = occurrences,
synthetics = synthetics,
)
val docs = TextDocuments(List(doc))
val out = Files.newOutputStream(outpath)
try
val stream = internal.SemanticdbOutputStream.newInstance(out)
docs.writeTo(stream)
stream.flush()
finally
out.close()
end Extractor
end ExtractSemanticDB
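For debugging, a written payload can be read back with the same internal streams used above; a sketch, assuming the .semanticdb file already exists at the given path:

    import java.nio.file.{Files, Path}
    import dotty.tools.dotc.semanticdb.{TextDocuments, internal}
    import scala.util.Using

    // Read a .semanticdb file back and print every diagnostic it carries.
    def printDiagnostics(path: Path): Unit =
      Using.resource(Files.newInputStream(path)) { in =>
        val docs = TextDocuments.parseFrom(internal.SemanticdbInputStream.newInstance(in))
        for doc <- docs.documents; d <- doc.diagnostics do
          println(s"${doc.uri}: ${d.severity} ${d.message}")
      }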
4 changes: 3 additions & 1 deletion compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
@@ -29,7 +29,7 @@ object Scala3:

private val WILDCARDTypeName = nme.WILDCARD.toTypeName

def range(span: Span, treeSource: SourceFile)(using Context): Option[Range] =
def range(span: Span, treeSource: SourceFile): Option[Range] =
def lineCol(offset: Int) = (treeSource.offsetToLine(offset), treeSource.column(offset))
val (startLine, startCol) = lineCol(span.start)
val (endLine, endCol) = lineCol(span.end)
@@ -486,6 +486,8 @@

given Ordering[SymbolInformation] = Ordering.by[SymbolInformation, String](_.symbol)(IdentifierOrdering())

given Ordering[Diagnostic] = (x, y) => compareRange(x.range, y.range)

given Ordering[Synthetic] = (x, y) => compareRange(x.range, y.range)

/**
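Scala3.range no longer needs a Context, and the new given makes SemanticDB diagnostics orderable by range, just like synthetics; a minimal sketch of using it:

    import dotty.tools.dotc.semanticdb.{Diagnostic, Scala3}
    import Scala3.given

    // Sort SemanticDB diagnostics by range via the new given Ordering[Diagnostic].
    def sortedByRange(ds: Seq[Diagnostic]): Seq[Diagnostic] = ds.sorted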
20 changes: 20 additions & 0 deletions compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
@@ -69,6 +69,8 @@ object Tools:
sb.append("Language => ").append(languageString(doc.language)).nl
sb.append("Symbols => ").append(doc.symbols.length).append(" entries").nl
sb.append("Occurrences => ").append(doc.occurrences.length).append(" entries").nl
if doc.diagnostics.nonEmpty then
sb.append("Diagnostics => ").append(doc.diagnostics.length).append(" entries").nl
if doc.synthetics.nonEmpty then
sb.append("Synthetics => ").append(doc.synthetics.length).append(" entries").nl
sb.nl
@@ -78,6 +80,10 @@
sb.append("Occurrences:").nl
doc.occurrences.sorted.foreach(processOccurrence)
sb.nl
if doc.diagnostics.nonEmpty then
sb.append("Diagnostics:").nl
doc.diagnostics.sorted.foreach(d => processDiag(d))
sb.nl
if doc.synthetics.nonEmpty then
sb.append("Synthetics:").nl
doc.synthetics.sorted.foreach(s => processSynth(s, synthPrinter))
@@ -108,6 +114,20 @@
private def processSynth(synth: Synthetic, printer: SyntheticPrinter)(using sb: StringBuilder): Unit =
sb.append(printer.pprint(synth)).nl

private def processDiag(d: Diagnostic)(using sb: StringBuilder): Unit =
d.range match
case Some(range) => processRange(sb, range)
case _ => sb.append("[):")
sb.append(" ")
d.severity match
case Diagnostic.Severity.ERROR => sb.append("[error]")
case Diagnostic.Severity.WARNING => sb.append("[warning]")
case Diagnostic.Severity.INFORMATION => sb.append("[info]")
case _ => sb.append("[unknown]")
sb.append(" ")
sb.append(d.message)
sb.nl

private def processOccurrence(occ: SymbolOccurrence)(using sb: StringBuilder, sourceFile: SourceFile): Unit =
occ.range match
case Some(range) =>
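processDiag renders each diagnostic as a range, a bracketed severity, and the message. The severity mapping can be sketched on its own; the sample output line in the comment is illustrative, not captured from a real run:

    import dotty.tools.dotc.{semanticdb => s}

    // Same severity labels as processDiag above; an entry in the metap-style
    // output then looks roughly like: [4:2..4:7): [warning] unused local definition
    def severityLabel(sev: s.Diagnostic.Severity): String = sev match
      case s.Diagnostic.Severity.ERROR       => "[error]"
      case s.Diagnostic.Severity.WARNING     => "[warning]"
      case s.Diagnostic.Severity.INFORMATION => "[info]"
      case _                                 => "[unknown]"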
@@ -71,7 +71,6 @@ class SemanticdbTests:
def collectErrorOrUpdate(expectPath: Path, obtained: String) =
if updateExpectFiles then
Files.write(expectPath, obtained.getBytes(StandardCharsets.UTF_8))
println("updated: " + expectPath)
else
val expected = new String(Files.readAllBytes(expectPath), StandardCharsets.UTF_8)
val expectName = expectPath.getFileName
@@ -142,7 +141,8 @@
"-sourceroot", expectSrc.toString,
"-classpath", target.toString,
"-Xignore-scala2-macros",
"-usejavacp"
"-usejavacp",
"-Wunused:all"
) ++ inputFiles().map(_.toString)
val exit = Main.process(args)
assertFalse(s"dotc errors: ${exit.errorCount}", exit.hasErrors)
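To reproduce outside the test suite, one can drive the compiler the same way the test does. A sketch, assuming the existing -Xsemanticdb flag (alias -Ysemanticdb) enables SemanticDB output; the sourceroot and source path below are placeholders:

    import dotty.tools.dotc.Main

    // Compile one file with SemanticDB extraction and unused warnings enabled,
    // so the emitted .semanticdb documents carry the diagnostics.
    @main def compileWithSemanticdb(): Unit =
      val args = Array(
        "-Xsemanticdb",
        "-sourceroot", ".",
        "-Wunused:all",
        "-usejavacp",
        "example/Foo.scala"
      )
      val reporter = Main.process(args)
      assert(!reporter.hasErrors, s"dotc errors: ${reporter.errorCount}")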