Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Export diagnostics (including unused warnings) to SemanticDB #17835

Merged
merged 30 commits into from
Oct 19, 2023
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
75e6bb6
Export diagnostics (including unused warnings) to SemanticDB
tanishiking Jun 5, 2023
c5a80c4
Add -Wunused:all to the compiler options to see Semanticdb export unused
tanishiking Jun 5, 2023
e1d0ec9
Add some comments
tanishiking Jun 5, 2023
f17fb5f
ExtractSemanticDB.PostInlining now update the .semanticdb file on disk
tanishiking Jun 27, 2023
d2b54c2
Parallelize read/write SemanticDB
tanishiking Jul 19, 2023
56c5909
Run AppendDiagnostics phase after crossVersionChecks
tanishiking Jul 19, 2023
50e0fd2
[WIP] Don't parse TextDocuments in AppendDiagnostics
tanishiking Jul 20, 2023
2babbc4
Remove unnecessary suffix
tanishiking Jul 20, 2023
5186b62
Remove unused local + Context
tanishiking Jul 20, 2023
66a5306
Merge branch 'main' into export-diagnostics
tanishiking Jul 20, 2023
a414fae
Fix presentation compiler
tanishiking Jul 20, 2023
d5065ec
Parse TextDocuments instead of using `com.google.protobuf`
tanishiking Jul 25, 2023
d7258b4
Remove Context from relPath and so on
tanishiking Jul 25, 2023
4b5d3e7
Remove from parallel
tanishiking Jul 25, 2023
3008ed8
Remove ancii color code from SemanticDB
tanishiking Jul 25, 2023
c9de8e2
Extractor.extract uses unitCtx
tanishiking Jul 26, 2023
40a2a00
Remove Context parameter from Scala3.range and toSemanticDbDiagnositcs
tanishiking Jul 26, 2023
7b29f4c
Remove unused
tanishiking Jul 27, 2023
4f6a092
Revert "Remove from parallel"
tanishiking Jul 27, 2023
a6dfec2
Merge branch 'main' into export-diagnostics
tanishiking Jul 27, 2023
3daeaa6
Merge branch 'main' into export-diagnostics
tanishiking Sep 21, 2023
499c347
Remove println
tanishiking Sep 21, 2023
71a5cd0
Fix TastyBootstrapTests by adding sharable annotation to regex
tanishiking Sep 22, 2023
b28b425
Merge branch 'main' into export-diagnostics
tanishiking Oct 16, 2023
d54d8a2
Do not run toSemanticDiagnosic in parallel
tanishiking Oct 16, 2023
b8565a0
Fix wrong rebase
tanishiking Oct 16, 2023
2857632
Merge branch 'main' into export-diagnostics
tanishiking Oct 19, 2023
053e644
Restore removed comment
tanishiking Oct 19, 2023
567d486
Fix writesToOutputDir
tanishiking Oct 19, 2023
275e6fa
Uncomment the phase for appending warnings
tanishiking Oct 19, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion compiler/src/dotty/tools/dotc/Compiler.scala
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class Compiler {
List(new CheckUnused.PostTyper) :: // Check for unused elements
List(new YCheckPositions) :: // YCheck positions
List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks
List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files
List(new semanticdb.ExtractSemanticDB.ExtractSemanticInfo) :: // Extract info and attach to the tree of the unit file
List(new PostTyper) :: // Additional checks and cleanups after type checking
List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only)
List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks
Expand Down Expand Up @@ -71,6 +71,7 @@ class Compiler {
new ExpandSAMs, // Expand single abstract method closures to anonymous classes
new ElimRepeated, // Rewrite vararg parameters and arguments
new RefChecks) :: // Various checks mostly related to abstract members and overriding
List(new semanticdb.ExtractSemanticDB.AppendDiagnostics) :: // Attach warnings to extracted SemanticDB and write to .semanticdb file
List(new init.Checker) :: // Check initialization of objects
List(new ProtectedAccessors, // Add accessors for protected members
new ExtensionMethods, // Expand methods of value classes with extension methods
Expand Down
9 changes: 8 additions & 1 deletion compiler/src/dotty/tools/dotc/reporting/Reporter.scala
Original file line number Diff line number Diff line change
Expand Up @@ -109,9 +109,14 @@ abstract class Reporter extends interfaces.ReporterResult {

private var errors: List[Error] = Nil

private var warnings: List[Warning] = Nil

/** All errors reported by this reporter (ignoring outer reporters) */
def allErrors: List[Error] = errors

/** All warnings reported by this reporter (ignoring outer reporters) */
def allWarnings: List[Warning] = warnings

/** Were sticky errors reported? Overridden in StoreReporter. */
def hasStickyErrors: Boolean = false

Expand Down Expand Up @@ -157,7 +162,9 @@ abstract class Reporter extends interfaces.ReporterResult {
markReported(d)
withMode(Mode.Printing)(doReport(d))
d match {
case _: Warning => _warningCount += 1
case w: Warning =>
warnings = w :: warnings
_warningCount += 1
case e: Error =>
errors = e :: errors
_errorCount += 1
Expand Down
20 changes: 20 additions & 0 deletions compiler/src/dotty/tools/dotc/semanticdb/DiagnosticOps.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package dotty.tools.dotc.semanticdb

import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.{semanticdb => s}
import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING}
import dotty.tools.dotc.core.Contexts.Context

object DiagnosticOps:
  extension (d: Diagnostic)
    /** Convert a compiler [[Diagnostic]] into its SemanticDB protobuf form.
     *
     *  The range is derived from the diagnostic's span and source file.
     *  Any level other than ERROR/WARNING/INFO falls back to INFORMATION so
     *  the produced message is always well-formed.
     */
    def toSemanticDiagnostic(using Context): s.Diagnostic =
      val severity = d.level match
        case ERROR   => s.Diagnostic.Severity.ERROR
        case WARNING => s.Diagnostic.Severity.WARNING
        case INFO    => s.Diagnostic.Severity.INFORMATION
        case _       => s.Diagnostic.Severity.INFORMATION
      s.Diagnostic(
        range = Scala3.range(d.pos.span, d.pos.source),
        severity = severity,
        message = d.msg.message
      )
218 changes: 159 additions & 59 deletions compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
Original file line number Diff line number Diff line change
Expand Up @@ -21,20 +21,40 @@ import transform.SymUtils._

import scala.collection.mutable
import scala.annotation.{ threadUnsafe => tu, tailrec }
import scala.jdk.CollectionConverters._
import scala.PartialFunction.condOpt
import typer.ImportInfo.withRootImports

import dotty.tools.dotc.{semanticdb => s}
import dotty.tools.io.{AbstractFile, JarArchive}
import dotty.tools.dotc.util.Property
tanishiking marked this conversation as resolved.
Show resolved Hide resolved
import dotty.tools.dotc.semanticdb.DiagnosticOps.*
import scala.util.{Using, Failure, Success}
import com.google.protobuf.Empty
import com.google.protobuf.UnknownFieldSet
import com.google.protobuf.UnknownFieldSet.Field
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

these are java protobuf classes - you should be able to get a slightly better syntax (but no significant difference in performance) with classes in the scalapb package

Copy link
Member Author

@tanishiking tanishiking Jul 24, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Unfortunately, we cannot utilize the scalapb runtime in this context. We intentionally removed the dependency on the scalapb-runtime package in semanticdb-for-scala3.
The reason behind this decision is to avoid including the scalapb runtime and its transitive dependencies from the Scala 3 compiler.
(TBH, I'm not sure why the compiler has a dependency on com.google.protobuf 🤔 )

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

it doesnt work anyway,

If we want to do this then we should copy these definitions to the dotty.tools.dotc.semanticdb.internal package
Screenshot 2023-07-24 at 15 05 19

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

there used to be a dependency on protobuf due to Zinc 1.3, but we merged yesterday depending exclusively on Zinc 1.9 which no longer has protobuf

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

it doesnt work anyway,

com.google.protobuf removed recently? It works in my local environment 🤔

there used to be a dependency on protobuf due to Zinc 1.3, but we merged yesterday

Ah, ok.

Copy link
Member Author

@tanishiking tanishiking Jul 25, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure whether we should do this without com.google.protobuf.
Should we still aim to be performant by reading the protobuf without fully parsing it? (I know it's better, but should we do that later in another PR?)

I'm going to look into whether there is an API in scalapb that reads protobuf as an Empty without depending on com.google.protobuf, but it may take some time.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could we merge the current state or one with the fixes? This would unblock scalafix, so would be pretty great to do. We could do optimization improvements later?

Copy link
Member Author

@tanishiking tanishiking Jul 25, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'll move back to "parsing TextDocuments and update it" anyway :) -> done d5065ec

import java.io.ByteArrayOutputStream
import java.io.BufferedOutputStream
tanishiking marked this conversation as resolved.
Show resolved Hide resolved


/** Extract symbol references and uses to semanticdb files.
* See https://scalameta.org/docs/semanticdb/specification.html#symbol-1
* for a description of the format.
* TODO: Also extract type information
*
* Here, we define two phases for "ExtractSemanticDB", "PostTyper" and "PostInlining".
*
* The "PostTyper" phase extracts SemanticDB information such as symbol
* definitions, symbol occurrences, type information, and synthetics
* and write .semanticdb file.
*
* The "PostInlining" phase extracts diagnostics from "ctx.reporter" and
* attaches them to the SemanticDB information extracted in the "PostTyper" phase.
* We need to run this phase after the "CheckUnused.PostInlining" phase
* so that we can extract the warnings generated by "-Wunused".
*/
class ExtractSemanticDB extends Phase:
import Scala3.{_, given}
class ExtractSemanticDB private (phaseMode: ExtractSemanticDB.PhaseMode, suffix: String) extends Phase:
tanishiking marked this conversation as resolved.
Show resolved Hide resolved
tanishiking marked this conversation as resolved.
Show resolved Hide resolved

override val phaseName: String = ExtractSemanticDB.name
override val phaseName: String = ExtractSemanticDB.phaseNamePrefix + suffix

override val description: String = ExtractSemanticDB.description

Expand All @@ -46,14 +66,141 @@ class ExtractSemanticDB extends Phase:
// Check not needed since it does not transform trees
override def isCheckable: Boolean = false

override def run(using Context): Unit =
val unit = ctx.compilationUnit
val extractor = Extractor()
extractor.extract(unit.tpdTree)
ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList)
override def runOn(units: List[CompilationUnit])(using ctx: Context): List[CompilationUnit] = {
val appendDiagnostics = phaseMode == ExtractSemanticDB.PhaseMode.AppendDiagnostics
if (appendDiagnostics)
val warnings = ctx.reporter.allWarnings.groupBy(w => w.pos.source)
units.asJava.parallelStream().forEach { unit =>
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

also I think here we should extract the unit source, and do the conversion to SemanticDiagnostic before we start parallelising

val unitCtx = ctx.fresh.setCompilationUnit(unit).withRootImports
warnings.get(unit.source).foreach { ws =>
ExtractSemanticDB.appendDiagnostics(unit.source, ws.map(_.toSemanticDiagnostic))
}
}
else
units.foreach { unit =>
val extractor = ExtractSemanticDB.Extractor()
extractor.extract(unit.tpdTree)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not quite sure but there could be a slight difference here given that the extractor isn't running with unitCtx like before

ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList)
}
units
}

def run(using Context): Unit = unsupported("run")
end ExtractSemanticDB

object ExtractSemanticDB:
import java.nio.file.Path
import java.nio.file.Files
import java.nio.file.Paths

val phaseNamePrefix: String = "extractSemanticDB"
val description: String = "extract info into .semanticdb files"

enum PhaseMode:
case ExtractSemanticInfo
case AppendDiagnostics

class ExtractSemanticInfo extends ExtractSemanticDB(PhaseMode.ExtractSemanticInfo, "ExtractSemanticInfo")

class AppendDiagnostics extends ExtractSemanticDB(PhaseMode.AppendDiagnostics, "AppendDiagnostics")

private def semanticdbTarget(using Context): Option[Path] =
Option(ctx.settings.semanticdbTarget.value)
.filterNot(_.isEmpty)
.map(Paths.get(_))

private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value

private def absolutePath(path: Path): Path = path.toAbsolutePath.normalize

private def write(
source: SourceFile,
occurrences: List[SymbolOccurrence],
symbolInfos: List[SymbolInformation],
synthetics: List[Synthetic],
)(using Context): Unit =
val outpath = semanticdbPath(source)
Files.createDirectories(outpath.getParent())
val doc: TextDocument = TextDocument(
schema = Schema.SEMANTICDB4,
language = Language.SCALA,
uri = Tools.mkURIstring(Paths.get(relPath(source))),
text = "",
md5 = internal.MD5.compute(String(source.content)),
symbols = symbolInfos,
occurrences = occurrences,
synthetics = synthetics,
)
val docs = TextDocuments(List(doc))
val out = Files.newOutputStream(outpath)
try
val stream = internal.SemanticdbOutputStream.newInstance(out)
docs.writeTo(stream)
stream.flush()
finally
out.close()
end write

private def appendDiagnostics(
source: SourceFile,
tanishiking marked this conversation as resolved.
Show resolved Hide resolved
diagnostics: Seq[Diagnostic]
)(using Context): Unit =
tanishiking marked this conversation as resolved.
Show resolved Hide resolved
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

so to make this safe to call in parallel we really need to get rid of that Context parameter, and it looks like its only required for semanticdbPath, so perhaps we can compute that before calling the method

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

done: d7258b4

val path = semanticdbPath(source)
Using.Manager { use =>
val in = use(Files.newInputStream(path))
// val sin = internal.SemanticdbInputStream.newInstance(in)
val textDocuments = Empty.parseFrom(in)
val docsBytes = textDocuments.getUnknownFields().getField(TextDocuments.DOCUMENTS_FIELD_NUMBER).getLengthDelimitedList()
val docFields = Empty.parseFrom(docsBytes.get(0)).getUnknownFields()
if (source.file.name == "ValPattern.scala")
println(docFields)
//docMap.put(7, )

// val docs = TextDocuments.parseFrom(sin)

val bos = use(new ByteArrayOutputStream())
val sbos = internal.SemanticdbOutputStream.newInstance(bos)
val doc = TextDocument(diagnostics = diagnostics)
doc.writeTo(sbos)
sbos.flush()
val diagnosticsOnly = Empty.parseFrom(bos.toByteArray()).getUnknownFields()

val merged = docFields.toBuilder().mergeFrom(diagnosticsOnly).build()
// println(merged)
val field = Field.newBuilder().addLengthDelimited(merged.toByteString()).build()
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am AFK so I can't confirm, but I believe you should pass to addLengthDelimited the (ByteString) concatenation of docBytes.get(0) and doc.toByteString.


val fields = textDocuments.getUnknownFields().toBuilder().mergeField(TextDocuments.DOCUMENTS_FIELD_NUMBER, field).build()
// println(fields)
val updated = textDocuments.toBuilder().setUnknownFields(fields).build()
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

you should start from a fresh TextDocuments instead of reusing the existing one - the current state will be carried over in the unknown field

if (source.file.name == "ValPattern.scala")
println(updated)

val out = use(Files.newOutputStream(path))
val bout = new BufferedOutputStream(out)
updated.writeTo(bout)
bout.flush()
// val sout = internal.SemanticdbOutputStream.newInstance(out)
// TextDocuments(docs.documents.map(_.withDiagnostics(diagnostics))).writeTo(sout)
} match
case Failure(ex) =>
println(ex.getMessage())
// failed somehow, should we say something?
case Success(_) => // success to update semanticdb, say nothing
end appendDiagnostics

  /** Path of `source` relative to the configured `-sourceroot` setting. */
  private def relPath(source: SourceFile)(using ctx: Context) =
    SourceFile.relativePath(source, ctx.settings.sourceroot.value)

private def semanticdbPath(source: SourceFile)(using ctx: Context) =
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I believe this does not need context

Suggested change
private def semanticdbPath(source: SourceFile)(using ctx: Context) =
private def semanticdbPath(source: SourceFile) =

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is needed for relPath

  private def relPath(source: SourceFile)(using ctx: Context) =
    SourceFile.relativePath(source, ctx.settings.sourceroot.value)

and

semanticdbTarget

  private def semanticdbTarget(using Context): Option[Path] =
    Option(ctx.settings.semanticdbTarget.value)
      .filterNot(_.isEmpty)
      .map(Paths.get(_))

Should we pass around settings instead of using Context?

absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath))
.resolve("META-INF")
.resolve("semanticdb")
.resolve(relPath(source))
.resolveSibling(source.name + ".semanticdb")

/** Extractor of symbol occurrences from trees */
class Extractor extends TreeTraverser:
private class Extractor extends TreeTraverser:
import Scala3.{_, given}
given s.SemanticSymbolBuilder = s.SemanticSymbolBuilder()
val synth = SyntheticsExtractor()
given converter: s.TypeOps = s.TypeOps()
Expand Down Expand Up @@ -468,52 +615,5 @@ class ExtractSemanticDB extends Phase:
registerSymbol(vparam.symbol, symkinds)
traverse(vparam.tpt)
tparams.foreach(tp => traverse(tp.rhs))


object ExtractSemanticDB:
import java.nio.file.Path
import java.nio.file.Files
import java.nio.file.Paths

val name: String = "extractSemanticDB"
val description: String = "extract info into .semanticdb files"

private def semanticdbTarget(using Context): Option[Path] =
Option(ctx.settings.semanticdbTarget.value)
.filterNot(_.isEmpty)
.map(Paths.get(_))

private def outputDirectory(using Context): AbstractFile = ctx.settings.outputDir.value

def write(
source: SourceFile,
occurrences: List[SymbolOccurrence],
symbolInfos: List[SymbolInformation],
synthetics: List[Synthetic],
)(using Context): Unit =
def absolutePath(path: Path): Path = path.toAbsolutePath.normalize
val relPath = SourceFile.relativePath(source, ctx.settings.sourceroot.value)
val outpath = absolutePath(semanticdbTarget.getOrElse(outputDirectory.jpath))
.resolve("META-INF")
.resolve("semanticdb")
.resolve(relPath)
.resolveSibling(source.name + ".semanticdb")
Files.createDirectories(outpath.getParent())
val doc: TextDocument = TextDocument(
schema = Schema.SEMANTICDB4,
language = Language.SCALA,
uri = Tools.mkURIstring(Paths.get(relPath)),
text = "",
md5 = internal.MD5.compute(String(source.content)),
symbols = symbolInfos,
occurrences = occurrences,
synthetics = synthetics,
)
val docs = TextDocuments(List(doc))
val out = Files.newOutputStream(outpath)
try
val stream = internal.SemanticdbOutputStream.newInstance(out)
docs.writeTo(stream)
stream.flush()
finally
out.close()
end Extractor
end ExtractSemanticDB
2 changes: 2 additions & 0 deletions compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
Original file line number Diff line number Diff line change
Expand Up @@ -484,6 +484,8 @@ object Scala3:

given Ordering[SymbolInformation] = Ordering.by[SymbolInformation, String](_.symbol)(IdentifierOrdering())

given Ordering[Diagnostic] = (x, y) => compareRange(x.range, y.range)

given Ordering[Synthetic] = (x, y) => compareRange(x.range, y.range)

/**
Expand Down
20 changes: 20 additions & 0 deletions compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,8 @@ object Tools:
sb.append("Language => ").append(languageString(doc.language)).nl
sb.append("Symbols => ").append(doc.symbols.length).append(" entries").nl
sb.append("Occurrences => ").append(doc.occurrences.length).append(" entries").nl
if doc.diagnostics.nonEmpty then
sb.append("Diagnostics => ").append(doc.diagnostics.length).append(" entries").nl
if doc.synthetics.nonEmpty then
sb.append("Synthetics => ").append(doc.synthetics.length).append(" entries").nl
sb.nl
Expand All @@ -78,6 +80,10 @@ object Tools:
sb.append("Occurrences:").nl
doc.occurrences.sorted.foreach(processOccurrence)
sb.nl
if doc.diagnostics.nonEmpty then
sb.append("Diagnostics:").nl
doc.diagnostics.sorted.foreach(d => processDiag(d))
sb.nl
if doc.synthetics.nonEmpty then
sb.append("Synthetics:").nl
doc.synthetics.sorted.foreach(s => processSynth(s, synthPrinter))
Expand Down Expand Up @@ -108,6 +114,20 @@ object Tools:
private def processSynth(synth: Synthetic, printer: SyntheticPrinter)(using sb: StringBuilder): Unit =
sb.append(printer.pprint(synth)).nl

private def processDiag(d: Diagnostic)(using sb: StringBuilder): Unit =
d.range match
case Some(range) => processRange(sb, range)
case _ => sb.append("[):")
sb.append(" ")
d.severity match
case Diagnostic.Severity.ERROR => sb.append("[error]")
case Diagnostic.Severity.WARNING => sb.append("[warning]")
case Diagnostic.Severity.INFORMATION => sb.append("[info]")
case _ => sb.append("[unknown]")
sb.append(" ")
sb.append(d.message)
sb.nl

private def processOccurrence(occ: SymbolOccurrence)(using sb: StringBuilder, sourceFile: SourceFile): Unit =
occ.range match
case Some(range) =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,12 @@ class SemanticdbTests:
.resolve("semanticdb")
.resolve(relpath)
.resolveSibling(filename + ".semanticdb")
println(semanticdbPath)
val expectPath = source.resolveSibling(filename.replace(".scala", ".expect.scala"))
val doc = Tools.loadTextDocument(source, relpath, semanticdbPath)
println(semanticdbPath.getFileName().toString())
if (semanticdbPath.getFileName().toString() == "ValPattern.scala.semanticdb")
println(doc)
Tools.metac(doc, rootSrc.relativize(source))(using metacSb)
val obtained = trimTrailingWhitespace(SemanticdbTests.printTextDocument(doc))
collectErrorOrUpdate(expectPath, obtained)
Expand Down Expand Up @@ -142,7 +146,8 @@ class SemanticdbTests:
"-sourceroot", expectSrc.toString,
"-classpath", target.toString,
"-Xignore-scala2-macros",
"-usejavacp"
"-usejavacp",
"-Wunused:all"
) ++ inputFiles().map(_.toString)
val exit = Main.process(args)
assertFalse(s"dotc errors: ${exit.errorCount}", exit.hasErrors)
Expand Down
4 changes: 4 additions & 0 deletions tests/semanticdb/expect/Deprecated.expect.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
object Deprecated/*<-_empty_::Deprecated.*/ {
@deprecated/*->scala::deprecated#*/ def deprecatedMethod/*<-_empty_::Deprecated.deprecatedMethod().*/ = ???/*->scala::Predef.`???`().*/
def main/*<-_empty_::Deprecated.main().*/ = deprecatedMethod/*->_empty_::Deprecated.deprecatedMethod().*/
}
4 changes: 4 additions & 0 deletions tests/semanticdb/expect/Deprecated.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
object Deprecated {
@deprecated def deprecatedMethod = ???
def main = deprecatedMethod
}
Loading
Loading