From 2d2b8959c3d93bdcf421094183cd322f7a038f11 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 21 Oct 2016 21:22:04 +1100 Subject: [PATCH 01/17] Improve hash code of Names The old approach of using the first, last, and middle characters only lays a trap for generated names that have little or no entropy at these locations. For instance, fresh existential names generated in "as seen from" operations are one such case, and when compiling large batches of files the name table can become imbalanced. This seems to be the bottleneck compiling the enormous (generated) test suite for ScalaTest itself: https://github.com/scala/scala-dev/issues/246#issuecomment-255338925 This commit uses all characters to compute the hashCode. It improves the compilation time of ScalaTest tests from 487s to 349s (0.71x). It would still be useful to avoid generating these fresh names with a global counter, as this represents a steady name leak in long-lived Globals (e.g. the presentation compiler). --- .../scala/reflect/internal/Names.scala | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index b4cde7b6a3bf..eb5bf07734ac 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -47,17 +47,15 @@ trait Names extends api.Names { /** Hashtable for finding type names quickly. */ private val typeHashtable = new Array[TypeName](HASH_SIZE) - /** - * The hashcode of a name depends on the first, the last and the middle character, - * and the length of the name. - */ - private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = - if (len > 0) - (len * (41 * 41 * 41) + - cs(offset) * (41 * 41) + - cs(offset + len - 1) * 41 + - cs(offset + (len >> 1))) - else 0 + private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = { + var h = 0 + var i = 0 + while (i < len) { + h = 31 * h + cs(i + offset) + i += 1 + } + h + } /** Is (the ASCII representation of) name at given index equal to * cs[offset..offset+len-1]? From 50ff82195d3867ea9a004342f9e2c9fde8875830 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 24 Apr 2019 09:26:27 +1000 Subject: [PATCH 02/17] Limit string interpolation intrinsic to avoid compiler SOE Fall back to the old style when more than 64 varargs are provided.
Backport of a limit introduced in 2.13.x in #7678 --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 3 ++- test/files/pos/t10870.scala | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t10870.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 95b1c25a7afd..09d1115e9dc1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1604,7 +1604,8 @@ abstract class RefChecks extends Transform { if qual1.symbol == rd.StringContext_apply && treeInfo.isQualifierSafeToElide(qual) && lits.forall(lit => treeInfo.isLiteralString(lit)) && - lits.length == (args.length + 1) => + lits.length == (args.length + 1) && + args.lengthCompare(64) <= 0 => // TODO make more robust to large input so that we can drop this condition, chunk the concatenations in manageable batches val isRaw = sym == rd.StringContext_raw if (isRaw) Some((lits, args)) else { diff --git a/test/files/pos/t10870.scala b/test/files/pos/t10870.scala new file mode 100644 index 000000000000..9836821f1288 --- /dev/null +++ b/test/files/pos/t10870.scala @@ -0,0 +1,6 @@ +package example + +object Test { + val a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36, a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48, a49, a50, a51, a52, a53, a54, a55, a56, a57, a58, a59, a60, a61, a62, a63, a64, a65, a66, a67, a68, a69, a70, a71, a72, a73, a74, a75, a76, a77, a78, a79, a80, a81, a82, a83, a84, a85, a86, a87, a88, a89, a90, a91, a92, a93, a94, a95, a96, a97, a98, a99, a100, a101, a102, a103, a104, a105, a106, a107, a108, a109, a110, a111, a112, a113, a114, a115, a116, a117, a118, a119, a120, a121, a122, a123, a124, a125, a126, a127, a128, a129, a130, a131, a132, a133, a134, a135, a136, a137, a138, a139, a140, a141, a142, a143, a144, a145, a146, a147, a148, a149, a150, a151, a152, a153, a154, a155, a156, a157, a158, a159, a160, a161, a162, a163, a164, a165, a166, a167, a168, a169, a170, a171, a172, a173, a174, a175, a176, a177, a178, a179, a180, a181, a182, a183, a184, a185, a186, a187, a188, a189, a190, a191, a192, a193, a194, a195, a196, a197, a198, a199, a200, a201, a202, a203, a204, a205, a206, a207, a208, a209, a210, a211, a212, a213, a214, a215, a216, a217, a218, a219, a220, a221, a222, a223, a224, a225, a226, a227, a228, a229, a230, a231, a232, a233, a234, a235, a236, a237, a238, a239, a240, a241, a242, a243, a244, a245, a246, a247, a248, a249, a250, a251, a252, a253, a254, a255, a256 = " " + val foo = s"""$a1 $a2 $a3 $a4 $a5 $a6 $a7 $a8 $a9 $a10 $a11 $a12 $a13 $a14 $a15 $a16 $a17 $a18 $a19 $a20 $a21 $a22 $a23 $a24 $a25 $a26 $a27 $a28 $a29 $a30 $a31 $a32 $a33 $a34 $a35 $a36 $a37 $a38 $a39 $a40 $a41 $a42 $a43 $a44 $a45 $a46 $a47 $a48 $a49 $a50 $a51 $a52 $a53 $a54 $a55 $a56 $a57 $a58 $a59 $a60 $a61 $a62 $a63 $a64 $a65 $a66 $a67 $a68 $a69 $a70 $a71 $a72 $a73 $a74 $a75 $a76 $a77 $a78 $a79 $a80 $a81 $a82 $a83 $a84 $a85 $a86 $a87 $a88 $a89 $a90 $a91 $a92 $a93 $a94 $a95 $a96 $a97 $a98 $a99 $a100 $a101 $a102 $a103 $a104 $a105 $a106 $a107 $a108 $a109 $a110 $a111 $a112 $a113 $a114 $a115 $a116 $a117 $a118 $a119 $a120 $a121 $a122 $a123 $a124 $a125 $a126 $a127 $a128 $a129 $a130 $a131 $a132 $a133 $a134 $a135 $a136 $a137 $a138 $a139 $a140 $a141 $a142 $a143 $a144 $a145 $a146 $a147 $a148 $a149 $a150 $a151 $a152 $a153 
$a154 $a155 $a156 $a157 $a158 $a159 $a160 $a161 $a162 $a163 $a164 $a165 $a166 $a167 $a168 $a169 $a170 $a171 $a172 $a173 $a174 $a175 $a176 $a177 $a178 $a179 $a180 $a181 $a182 $a183 $a184 $a185 $a186 $a187 $a188 $a189 $a190 $a191 $a192 $a193 $a194 $a195 $a196 $a197 $a198 $a199 $a200 $a201 $a202 $a203 $a204 $a205 $a206 $a207 $a208 $a209 $a210 $a211 $a212 $a213 $a214 $a215 $a216 $a217 $a218 $a219 $a220 $a221 $a222 $a223 $a224 $a225 $a226 $a227 $a228 $a229 $a230 $a231 $a232 $a233 $a234 $a235 $a236 $a237 $a238 $a239 $a240 $a241 $a242 $a243 $a244 $a245 $a246 $a247 $a248 $a249 $a250 $a251 $a252 $a253 $a254 $a255 $a256""" +} From 86c5a0385a305491d4267847de0be711811049bf Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 25 Apr 2019 15:42:53 -0700 Subject: [PATCH 03/17] correct jansi version in intellij setup --- src/intellij/scala.ipr.SAMPLE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index b5f03d96d7e8..ed483d019c86 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -363,7 +363,7 @@ - + From ee2719585e40cb4e9e523e20061a6a2075f4d49d Mon Sep 17 00:00:00 2001 From: Michael Pollmeier Date: Tue, 30 Apr 2019 14:23:00 +1200 Subject: [PATCH 04/17] fix XSS vulnerability in scaladoc search to trigger XSS vuln, simply paste this into the search bar: ``` "\>{{7*7}} ``` all credit for finding the vulnerability goes to *Yeasir Arafat* --- src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index 087c975aedd1..e899f06b5c0f 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -532,6 +532,7 @@ function searchAll() { scheduler.clear("search"); // clear previous search maxJobs = 1; // clear previous max var searchStr = $("#textfilter input").attr("value").trim() || ''; + searchStr = escape(searchStr); if (searchStr === '') { $("div#search-results").hide(); From fa355d04444a32e2b7769ba8868d594b24daf3da Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 30 Apr 2019 15:59:26 +1000 Subject: [PATCH 05/17] Optimize importedSymbol Call TypeName.toTermName less frequently. 
--- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 3b1d75567f02..c2a49d19c1b1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1532,10 +1532,13 @@ trait Contexts { self: Analyzer => var selectors = tree.selectors @inline def current = selectors.head while ((selectors ne Nil) && result == NoSymbol) { - if (current.rename == name.toTermName) + def sameName(name: Name, other: Name) = { + (name eq other) || (name ne null) && name.start == other.start + } + if (sameName(current.rename, name)) result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports if (name.isTypeName) current.name.toTypeName else current.name) - else if (current.name == name.toTermName) + else if (sameName(current.name, name)) renamed = true else if (current.name == nme.WILDCARD && !renamed && !requireExplicit) result = qual.tpe.nonLocalMember(name) From 5e8355a621b1e34203cca4e02d3e371e7ef2e400 Mon Sep 17 00:00:00 2001 From: Michael Pollmeier Date: Fri, 3 May 2019 10:43:49 +1200 Subject: [PATCH 06/17] fix xss by writing the input parameter properly to the dom rather than escaping the search string, which breaks the search for e.g. `:+` solution contributed by NthPortal in https://github.com/scala/scala/pull/8018#issuecomment-488546695 --- .../scala/tools/nsc/doc/html/resource/lib/index.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index e899f06b5c0f..379cb701b471 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -532,7 +532,6 @@ function searchAll() { scheduler.clear("search"); // clear previous search maxJobs = 1; // clear previous max var searchStr = $("#textfilter input").attr("value").trim() || ''; - searchStr = escape(searchStr); if (searchStr === '') { $("div#search-results").hide(); @@ -563,9 +562,12 @@ function searchAll() { entityResults.appendChild(entityH1); $("div#results-content") - .prepend("" - +" Showing results for \"" + searchStr + "\"" - +""); + .prepend( + $("") + .addClass("search-text") + .append(document.createTextNode(" Showing results for ")) + .append($("").addClass("query-str").text(searchStr)) + ); var regExp = compilePattern(searchStr); From 1ad22f1e77cb274844a9ce369201f3ec10b9cb0b Mon Sep 17 00:00:00 2001 From: Michael Pollmeier Date: Fri, 3 May 2019 10:51:18 +1200 Subject: [PATCH 07/17] four space indentation --- .../scala/tools/nsc/doc/html/resource/lib/index.js | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index 379cb701b471..33b49b6d76f1 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -561,13 +561,12 @@ function searchAll() { entityH1.innerHTML = "Entity results"; entityResults.appendChild(entityH1); - $("div#results-content") - .prepend( - $("") - .addClass("search-text") - .append(document.createTextNode(" Showing results for ")) - 
.append($("").addClass("query-str").text(searchStr)) - ); + $("div#results-content").prepend( + $("") + .addClass("search-text") + .append(document.createTextNode(" Showing results for ")) + .append($("").addClass("query-str").text(searchStr)) + ); var regExp = compilePattern(searchStr); From 3347caa572d39efe474fed416f056f465f1e4123 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Apr 2019 14:29:25 +1000 Subject: [PATCH 08/17] Remove unused, duplicated copy of findMacroClassLoader --- .../scala/tools/nsc/plugins/Plugins.scala | 34 ------------------- .../nsc/GlobalCustomizeClassloaderTest.scala | 1 - 2 files changed, 35 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index d30cf712f8ac..8d47bfa329dc 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -167,38 +167,4 @@ trait Plugins { global: Global => (for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield { "\nOptions for plugin '%s':\n%s\n".format(plug.name, help) }).mkString - - /** Obtains a `ClassLoader` instance used for macro expansion. - * - * By default a new `ScalaClassLoader` is created using the classpath - * from global and the classloader of self as parent. - * - * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. - */ - protected def findMacroClassLoader(): ClassLoader = { - val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { - for { - file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) - af <- Option(nsc.io.AbstractFile getDirectory file) - } yield af.file.toURI.toURL - } else global.classPath.asURLs - def newLoader: () => ScalaClassLoader.URLClassLoader = () => { - analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) - } - - val policy = settings.YcacheMacroClassLoader.value - val cache = Macros.macroClassLoadersCache - val disableCache = policy == settings.CachePolicy.None.name - val checkStamps = policy == settings.CachePolicy.LastModified.name - cache.checkCacheability(classpath, checkStamps, disableCache) match { - case Left(msg) => - analyzer.macroLogVerbose(s"macro classloader: $msg.") - val loader = newLoader() - closeableRegistry.registerClosable(loader) - loader - case Right(paths) => - cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) - } - } } diff --git a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala index 500379706090..9f93c6acaa76 100644 --- a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala +++ b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala @@ -17,7 +17,6 @@ class GlobalCustomizeClassloaderTest { // that properly closes them before one of the elements needs to be overwritten. 
@Test def test(): Unit = { val g = new Global(new Settings) { - override protected[scala] def findMacroClassLoader(): ClassLoader = getClass.getClassLoader override protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { val d = new VirtualDirectory("", None) val xml = d.fileNamed("scalac-plugin.xml") From e4406b94c16a0799741607235c42f6f06965e538 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Apr 2019 14:33:55 +1000 Subject: [PATCH 09/17] Improve timer-based eviction of classloader caches Cancel in-progress timer task on a cache hit. This avoids reducing the effective deferred close delay when the old timer task fires and sees a ref count of zero, even though the ref count has since been positive. --- .../ZipAndJarFileLookupFactory.scala | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 2321f0ff80f0..c8c759f07cd4 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -197,35 +197,49 @@ final class FileBasedCache[T] { private case class Stamp(lastModified: FileTime, size: Long, fileKey: Object) private case class Entry(stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) + var timerTask: TimerTask = null + def cancelTimer(): Unit = { + timerTask match { + case null => + case t => t.cancel() + } + } } private val cache = collection.mutable.Map.empty[Seq[Path], Entry] - private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = new Closeable { - var closed = false - override def close(): Unit = { - if (!closed) { - closed = true - val count = e.referenceCount.decrementAndGet() - if (count == 0) { - e.t match { - case cl: Closeable => - FileBasedCache.timer match { - case Some(timer) => - val task = new TimerTask { - override def run(): Unit = { - cache.synchronized { - if (e.referenceCount.compareAndSet(0, -1)) { - cache.remove(paths) - cl.close() + private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = { + // Cancel the deferred close timer (if any) that was started when the reference count + // last dropped to zero. + e.cancelTimer() + + new Closeable { + var closed = false + override def close(): Unit = { + if (!closed) { + closed = true + val count = e.referenceCount.decrementAndGet() + if (count == 0) { + e.t match { + case cl: Closeable => + FileBasedCache.timer match { + case Some(timer) => + val task = new TimerTask { + override def run(): Unit = { + cache.synchronized { + if (e.referenceCount.compareAndSet(0, -1)) { + cache.remove(paths) + cl.close() + } } } } - } - timer.schedule(task, FileBasedCache.deferCloseMs.toLong) - case None => - cl.close() - } - case _ => + e.timerTask = task + timer.schedule(task, FileBasedCache.deferCloseMs.toLong) + case None => + cl.close() + } + case _ => + } } } } From e5dab49ca9efcb242b9878df464aa12c74309a7e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 8 May 2019 16:44:59 +1000 Subject: [PATCH 10/17] Rationalize subclasses of Name Due to alignment, TermName_R (which doesn't cache the provided string for toString) takes up just as much space as TermName_S. The code ends up somewhat easier to read by just encoding the difference with a nullable field.
--- .../scala/reflect/internal/Names.scala | 35 +++++-------------- test/files/run/reflection-names.check | 6 ++-- 2 files changed, 11 insertions(+), 30 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index eb5bf07734ac..51f891dc9124 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -121,9 +121,7 @@ trait Names extends api.Names { enterChars(cs, offset, len) } val next = termHashtable(h) - val termName = - if (cachedString ne null) new TermName_S(startIndex, len, next, cachedString) - else new TermName_R(startIndex, len, next) + val termName = new TermName(startIndex, len, next, cachedString) // Add the new termName to the hashtable only after it's been fully constructed termHashtable(h) = termName termName @@ -187,7 +185,7 @@ trait Names extends api.Names { * or Strings as Names. Give names the key functions the absence of which * make people want Strings all the time. */ - sealed abstract class Name(protected val index: Int, protected val len: Int) extends NameApi with CharSequence { + sealed abstract class Name(protected val index: Int, protected val len: Int, cachedString: String) extends NameApi with CharSequence { type ThisNameType >: Null <: Name protected[this] def thisName: ThisNameType @@ -470,6 +468,9 @@ trait Names extends api.Names { def isOperatorName: Boolean = decode != toString // used by ide def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" else s } + + override final def toString: String = if (cachedString == null) new String(chrs, index, len) else cachedString + } implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name) @@ -515,28 +516,9 @@ trait Names extends api.Names { // final override def isOperatorName = false // } - /** TermName_S and TypeName_S have fields containing the string version of the name. - * TermName_R and TypeName_R recreate it each time toString is called. 
- */ - private final class TermName_S(index0: Int, len0: Int, next0: TermName, override val toString: String) extends TermName(index0, len0, next0) { - protected def createCompanionName(next: TypeName): TypeName = new TypeName_S(index, len, next, toString) - override def newName(str: String): TermName = newTermNameCached(str) - } - private final class TypeName_S(index0: Int, len0: Int, next0: TypeName, override val toString: String) extends TypeName(index0, len0, next0) { - override def newName(str: String): TypeName = newTypeNameCached(str) - } - - private final class TermName_R(index0: Int, len0: Int, next0: TermName) extends TermName(index0, len0, next0) { - protected def createCompanionName(next: TypeName): TypeName = new TypeName_R(index, len, next) - override def toString = new String(chrs, index, len) - } - - private final class TypeName_R(index0: Int, len0: Int, next0: TypeName) extends TypeName(index0, len0, next0) { - override def toString = new String(chrs, index, len) - } // SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled - sealed abstract class TermName(index0: Int, len0: Int, val next: TermName) extends Name(index0, len0) with TermNameApi { + final class TermName(index0: Int, len0: Int, val next: TermName, cachedString: String) extends Name(index0, len0, cachedString) with TermNameApi { type ThisNameType = TermName protected[this] def thisName: TermName = this @@ -568,8 +550,7 @@ trait Names extends api.Names { newTermName(chrs, start + from, to - from) def nameKind = "term" - /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */ - protected def createCompanionName(next: TypeName): TypeName + private def createCompanionName(next: TypeName): TypeName = new TypeName(index, len, next, cachedString) } implicit val TermNameTag = ClassTag[TermName](classOf[TermName]) @@ -579,7 +560,7 @@ trait Names extends api.Names { def unapply(name: TermName): Option[String] = Some(name.toString) } - sealed abstract class TypeName(index0: Int, len0: Int, val next: TypeName) extends Name(index0, len0) with TypeNameApi { + final class TypeName(index0: Int, len0: Int, val next: TypeName, cachedString: String) extends Name(index0, len0, cachedString) with TypeNameApi { type ThisNameType = TypeName protected[this] def thisName: TypeName = this diff --git a/test/files/run/reflection-names.check b/test/files/run/reflection-names.check index f8cb78cc67b4..52748e20c5db 100644 --- a/test/files/run/reflection-names.check +++ b/test/files/run/reflection-names.check @@ -1,4 +1,4 @@ (java.lang.String,bc) -(scala.reflect.internal.Names$TermName_R,bc) -(scala.reflect.internal.Names$TypeName_R,bc) -(scala.reflect.internal.Names$TypeName_R,bc) +(scala.reflect.internal.Names$TermName,bc) +(scala.reflect.internal.Names$TypeName,bc) +(scala.reflect.internal.Names$TypeName,bc) From 3b57788ba394631dd023d4a3493b75177e4d6914 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 9 May 2019 08:35:58 +1000 Subject: [PATCH 11/17] Avoid direct use of Names.chrs from Symbols --- src/reflect/scala/reflect/internal/Names.scala | 6 ++++-- src/reflect/scala/reflect/internal/Symbols.scala | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 51f891dc9124..6fe21ad426ab 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -38,7 +38,7 @@ trait Names extends api.Names { private val nameLock: Object = 
new Object /** Memory to store all names sequentially. */ - var chrs: Array[Char] = new Array[Char](NAME_SIZE) + var chrs: Array[Char] = new Array[Char](NAME_SIZE) // TODO this ought to be private private var nc = 0 /** Hashtable for finding term names quickly. */ @@ -470,7 +470,9 @@ trait Names extends api.Names { def debugString = { val s = decode ; if (isTypeName) s + "!" else s } override final def toString: String = if (cachedString == null) new String(chrs, index, len) else cachedString - + final def appendTo(buffer: java.lang.StringBuffer, start: Int, length: Int): Unit = { + buffer.append(chrs, this.start + start, length) + } } implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3341cee8aa2f..7982e71000c9 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1310,11 +1310,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (sym.isRoot || sym.isRootPackage || sym == NoSymbol || sym.owner.isEffectiveRoot) { val capacity = size + nSize b = new java.lang.StringBuffer(capacity) - b.append(chrs, symName.start, nSize) + symName.appendTo(b, 0, nSize) } else { loop(size + nSize + 1, sym.effectiveOwner.enclClass) b.append(separator) - b.append(chrs, symName.start, nSize) + symName.appendTo(b, 0, nSize) } } loop(suffix.length(), this) From 22f67798ef116e848a888c06ddeab8f3746460e0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 9 May 2019 08:41:17 +1000 Subject: [PATCH 12/17] Deprecate external access to Names.chrs --- .../scala/reflect/internal/Names.scala | 53 ++++++++++--------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 6fe21ad426ab..7e19e72e9ea7 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -37,8 +37,13 @@ trait Names extends api.Names { protected def synchronizeNames: Boolean = false private val nameLock: Object = new Object + /** Memory to store all names sequentially. */ - var chrs: Array[Char] = new Array[Char](NAME_SIZE) // TODO this ought to be private + private[this] var _chrs: Array[Char] = new Array[Char](NAME_SIZE) // TODO this ought to be private + @deprecated("Don't access name table contents directly.", "2.12.9") + def chrs: Array[Char] = _chrs + @deprecated("Don't access name table contents directly.", "2.12.9") + def chrs_=(cs: Array[Char]) = _chrs = cs private var nc = 0 /** Hashtable for finding term names quickly. 
*/ @@ -62,7 +67,7 @@ trait Names extends api.Names { */ private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = { var i = 0 - while ((i < len) && (chrs(index + i) == cs(offset + i))) + while ((i < len) && (_chrs(index + i) == cs(offset + i))) i += 1 i == len } @@ -71,12 +76,12 @@ trait Names extends api.Names { private def enterChars(cs: Array[Char], offset: Int, len: Int) { var i = 0 while (i < len) { - if (nc + i == chrs.length) { - val newchrs = new Array[Char](chrs.length * 2) - java.lang.System.arraycopy(chrs, 0, newchrs, 0, chrs.length) - chrs = newchrs + if (nc + i == _chrs.length) { + val newchrs = new Array[Char](_chrs.length * 2) + java.lang.System.arraycopy(_chrs, 0, newchrs, 0, chrs.length) + _chrs = newchrs } - chrs(nc + i) = cs(offset + i) + _chrs(nc + i) = cs(offset + i) i += 1 } if (len == 0) nc += 1 @@ -113,7 +118,7 @@ trait Names extends api.Names { // that name.toString will become an eager val, in which case the call // to enterChars cannot follow the construction of the TermName. var startIndex = 0 - if (cs == chrs) { + if (cs == _chrs) { // Optimize for subName, the new name is already stored in chrs startIndex = offset } else { @@ -225,7 +230,7 @@ trait Names extends api.Names { /** Copy bytes of this name to buffer cs, starting at position `offset`. */ final def copyChars(cs: Array[Char], offset: Int) = - java.lang.System.arraycopy(chrs, index, cs, offset, len) + java.lang.System.arraycopy(_chrs, index, cs, offset, len) /** @return the ascii representation of this name */ final def toChars: Array[Char] = { // used by ide @@ -271,7 +276,7 @@ trait Names extends api.Names { ****/ /** @return the i'th Char of this name */ - final def charAt(i: Int): Char = chrs(index + i) + final def charAt(i: Int): Char = _chrs(index + i) /** @return the index of first occurrence of char c in this name, length if not found */ final def pos(c: Char): Int = pos(c, 0) @@ -288,7 +293,7 @@ trait Names extends api.Names { */ final def pos(c: Char, start: Int): Int = { var i = start - while (i < len && chrs(index + i) != c) i += 1 + while (i < len && _chrs(index + i) != c) i += 1 i } @@ -305,7 +310,7 @@ trait Names extends api.Names { if (sLen == 1) return i while (i + sLen <= len) { var j = 1 - while (s.charAt(j) == chrs(index + i + j)) { + while (s.charAt(j) == _chrs(index + i + j)) { j += 1 if (j == sLen) return i } @@ -331,7 +336,7 @@ trait Names extends api.Names { */ final def lastPos(c: Char, start: Int): Int = { var i = start - while (i >= 0 && chrs(index + i) != c) i -= 1 + while (i >= 0 && _chrs(index + i) != c) i -= 1 i } @@ -342,14 +347,14 @@ trait Names extends api.Names { final def startsWith(prefix: Name, start: Int): Boolean = { var i = 0 while (i < prefix.length && start + i < len && - chrs(index + start + i) == chrs(prefix.start + i)) + _chrs(index + start + i) == _chrs(prefix.start + i)) i += 1 i == prefix.length } final def startsWith(prefix: String, start: Int): Boolean = { var i = 0 while (i < prefix.length && start + i < len && - chrs(index + start + i) == prefix.charAt(i)) + _chrs(index + start + i) == prefix.charAt(i)) i += 1 i == prefix.length } @@ -361,14 +366,14 @@ trait Names extends api.Names { final def endsWith(suffix: Name, end: Int): Boolean = { var i = 1 while (i <= suffix.length && i <= end && - chrs(index + end - i) == chrs(suffix.start + suffix.length - i)) + _chrs(index + end - i) == _chrs(suffix.start + suffix.length - i)) i += 1 i > suffix.length } final def endsWith(suffix: String, end: Int): Boolean = { var i = 1 while (i 
<= suffix.length && i <= end && - chrs(index + end - i) == suffix.charAt(suffix.length - i)) + _chrs(index + end - i) == suffix.charAt(suffix.length - i)) i += 1 i > suffix.length } @@ -384,7 +389,7 @@ trait Names extends api.Names { var i = index val max = index + len while (i < max) { - if (chrs(i) == ch) + if (_chrs(i) == ch) return true i += 1 } @@ -469,9 +474,9 @@ trait Names extends api.Names { def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" else s } - override final def toString: String = if (cachedString == null) new String(chrs, index, len) else cachedString + override final def toString: String = if (cachedString == null) new String(_chrs, index, len) else cachedString final def appendTo(buffer: java.lang.StringBuffer, start: Int, length: Int): Unit = { - buffer.append(chrs, this.start + start, length) + buffer.append(_chrs, this.start + start, length) } } @@ -530,7 +535,7 @@ trait Names extends api.Names { def toTypeName: TypeName = { def body = { // Re-computing the hash saves a field for storing it in the TermName - val h = hashValue(chrs, index, len) & HASH_MASK + val h = hashValue(_chrs, index, len) & HASH_MASK var n = typeHashtable(h) while ((n ne null) && n.start != index) n = n.next @@ -549,7 +554,7 @@ trait Names extends api.Names { def newName(str: String): TermName = newTermName(str) def companionName: TypeName = toTypeName def subName(from: Int, to: Int): TermName = - newTermName(chrs, start + from, to - from) + newTermName(_chrs, start + from, to - from) def nameKind = "term" private def createCompanionName(next: TypeName): TypeName = new TypeName(index, len, next, cachedString) @@ -571,7 +576,7 @@ trait Names extends api.Names { def toTermName: TermName = { def body = { // Re-computing the hash saves a field for storing it in the TypeName - val h = hashValue(chrs, index, len) & HASH_MASK + val h = hashValue(_chrs, index, len) & HASH_MASK var n = termHashtable(h) while ((n ne null) && n.start != index) n = n.next @@ -585,7 +590,7 @@ trait Names extends api.Names { def newName(str: String): TypeName = newTypeName(str) def companionName: TermName = toTermName def subName(from: Int, to: Int): TypeName = - newTypeName(chrs, start + from, to - from) + newTypeName(_chrs, start + from, to - from) def nameKind = "type" override def decode = if (nameDebug) super.decode + "!" else super.decode From f3901f0b9ec353fdd542cf6812bb7b9e63198ad5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 12 May 2019 09:36:32 +1000 Subject: [PATCH 13/17] Fix regression in import name comparison ``` scala> :power Power mode enabled. :phase is at typer. 
import scala.tools.nsc._, intp.global._, definitions._ Try :help or completions for vals._ and power._ scala> val t = TermName("abcdefghijklmnopqrstuvwxyz") t: $r.intp.global.TermName = abcdefghijklmnopqrstuvwxyz scala> t.subName(0, 25) res0: $r.intp.global.TermName = abcdefghijklmnopqrstuvwxy scala> res0.start res1: Int = 474232 scala> t.start res2: Int = 474232 ``` --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index c2a49d19c1b1..c23c57f1024a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1533,7 +1533,7 @@ trait Contexts { self: Analyzer => @inline def current = selectors.head while ((selectors ne Nil) && result == NoSymbol) { def sameName(name: Name, other: Name) = { - (name eq other) || (name ne null) && name.start == other.start + (name eq other) || (name ne null) && name.start == other.start && name.length == other.length } if (sameName(current.rename, name)) result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports From 0b28d2fb9e84e21ea3744a3f28a258661859de07 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 17 May 2019 12:57:50 +1000 Subject: [PATCH 14/17] Fix scalap parsing/printing of enum and class constant types --- .../scalap/scalax/rules/scalasig/ScalaSig.scala | 4 +++- .../scalax/rules/scalasig/ScalaSigPrinter.scala | 2 ++ test/files/scalap/constants.check | 16 ++++++++++++++++ test/files/scalap/constants.scala | 17 +++++++++++++++++ 4 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 test/files/scalap/constants.check create mode 100644 test/files/scalap/constants.scala diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala index b268bd99c9de..b8ef18306815 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala @@ -246,7 +246,9 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { 32 -~ longValue ^^ (java.lang.Double.longBitsToDouble), 33 -~ nameRef, 34 -^ null, - 35 -~ typeRef) + 35 -~ typeRef, + 36 -~ symbolRef + ) lazy val attributeInfo = 40 -~ symbolRef ~ typeRef ~ (constantRef?) ~ (nameRef ~ constantRef *) ^~~~^ AttributeInfo // sym_Ref info_Ref {constant_Ref} {nameRef constantRef} lazy val children = 41 -~ (nat*) ^^ Children //sym_Ref {sym_Ref} diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index 29b38c6c1baa..2b18f9a6c392 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -342,6 +342,8 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { case _: Double => "scala.Double" case _: String => "java.lang.String" case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]" + case e: ExternalSymbol => e.parent.get.path + case tp: Type => "java.lang.Class[" + toString(tp, sep) + "]" }) case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match { case "scala." 
=> flags match { diff --git a/test/files/scalap/constants.check b/test/files/scalap/constants.check new file mode 100644 index 000000000000..705cb8c44ade --- /dev/null +++ b/test/files/scalap/constants.check @@ -0,0 +1,16 @@ +class Constants extends scala.AnyRef { + def this() = { /* compiled code */ } + final val UnitConstant: scala.Unit = { /* compiled code */ } + final val ByteConstant: scala.Boolean = { /* compiled code */ } + final val CharConstant: scala.Char = { /* compiled code */ } + final val ShortConstant: scala.Short = { /* compiled code */ } + final val IntConstant: scala.Int = { /* compiled code */ } + final val LongConstant: scala.Long = { /* compiled code */ } + final val FloatConstant: scala.Float = { /* compiled code */ } + final val DoubleConstant: scala.Double = { /* compiled code */ } + final val NullConstant: scala.Null = { /* compiled code */ } + final val ClassConstant: java.lang.Class[scala.Predef.String] = { /* compiled code */ } + final val ClassConstant2: java.lang.Class[scala.Some[_]] = { /* compiled code */ } + final val EnumConstant: java.util.concurrent.TimeUnit = { /* compiled code */ } + final val StringConstant: java.lang.Class[scala.Predef.String] = { /* compiled code */ } +} diff --git a/test/files/scalap/constants.scala b/test/files/scalap/constants.scala new file mode 100644 index 000000000000..0a01a9f37809 --- /dev/null +++ b/test/files/scalap/constants.scala @@ -0,0 +1,17 @@ +class Constants { + final val UnitConstant = () + final val ByteConstant = false + final val CharConstant = 'a' + final val ShortConstant = 1.toShort + final val IntConstant = 1 + final val LongConstant = 1L + final val FloatConstant = 1f + final val DoubleConstant = 1d + + final val NullConstant = null + + final val ClassConstant = classOf[String] + final val ClassConstant2 = classOf[Some[_]] + final val EnumConstant = java.util.concurrent.TimeUnit.DAYS + final val StringConstant = classOf[String] +} From 78066506f746e7b69f0e76608b84ef60aed2cfe4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Oct 2018 11:02:53 +1000 Subject: [PATCH 15/17] Avoid typechecking val and def tpt-s twice Demo: ``` $ cat sandbox/test.scala && (scalac-ref 2.12.x -Ytyper-debug sandbox/test.scala 2>&1) > /tmp/old.log && (qscalac -Ytyper-debug sandbox/test.scala 2>&1) > /tmp/new.log && diff -U1000 /tmp/{old,new}.log ``` ```scala trait C { type X def foo: X } ``` ```diff --- /tmp/old.log 2019-05-20 13:56:45.000000000 +1000 +++ /tmp/new.log 2019-05-20 13:56:47.000000000 +1000 @@ -1,12 +1,10 @@ |-- EXPRmode-POLYmode-QUALmode (site: package ) +|-- EXPRmode-POLYmode-QUALmode (site: package ) | \-> .type |-- class C BYVALmode-EXPRmode (site: package ) | |-- X BYVALmode-EXPRmode (site: trait C) | | \-> [type X] C.this.X | |-- def foo BYVALmode-EXPRmode (site: trait C) | | |-- X TYPEmode (site: method foo in C) | | | \-> C.this.X -| | |-- X TYPEmode (site: method foo in C) -| | | \-> C.this.X | | \-> [def foo] => C.this.X | \-> [trait C] C ``` --- .../scala/tools/nsc/typechecker/Namers.scala | 12 ++++++-- .../scala/tools/nsc/typechecker/Typers.scala | 4 +-- test/files/neg/t2918.check | 5 +--- test/files/neg/t5093.check | 5 +--- test/files/run/analyzerPlugins.check | 28 +++++++++---------- .../scala/reflect/internal/PrintersTest.scala | 2 +- 6 files changed, 29 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 74db109014c0..53bf0b655671 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1311,7 +1311,11 @@ trait Namers extends MethodSynthesis { val resTpGiven = if (tpt.isEmpty) WildcardType - else typer.typedType(tpt).tpe + else { + val tptTyped = typer.typedType(tpt) + context.unit.transformed(tpt) = tptTyped + tptTyped.tpe + } // ignore missing types unless we can look to overridden method to recover the missing information @@ -1723,7 +1727,11 @@ trait Namers extends MethodSynthesis { tptFromRhsUnderPt } - } else typer.typedType(tpt).tpe + } else { + val tptTyped = typer.typedType(tpt) + context.unit.transformed(tpt) = tptTyped + tptTyped.tpe + } // println(s"val: $result / ${vdef.tpt.tpe} / ") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 45d118b5fab2..0a3002d04c80 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2087,7 +2087,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else typedModifiers(vdef.mods) sym.annotations.map(_.completeInfo()) - val tpt1 = checkNoEscaping.privates(this, sym, typedType(vdef.tpt)) + val tpt1 = checkNoEscaping.privates(this, sym, transformedOr(vdef.tpt, typedType(vdef.tpt))) checkNonCyclic(vdef, tpt1) // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to @@ -2315,7 +2315,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (isRepeatedParamType(vparam1.symbol.tpe)) StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) + val tpt1 = checkNoEscaping.privates(this, meth, transformedOr(ddef.tpt, typedType(ddef.tpt))) checkNonCyclic(ddef, tpt1) ddef.tpt.setType(tpt1.tpe) val typedMods = typedModifiers(ddef.mods) diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check index aae3045e8af7..f45494d78131 100644 --- a/test/files/neg/t2918.check +++ b/test/files/neg/t2918.check @@ -4,7 +4,4 @@ t2918.scala:2: error: illegal cyclic reference involving type A t2918.scala:2: error: cyclic aliasing or subtyping involving type A def g[X, A[X] <: A[X]](x: A[X]) = x ^ -t2918.scala:2: error: A does not take type parameters - def g[X, A[X] <: A[X]](x: A[X]) = x - ^ -three errors found +two errors found diff --git a/test/files/neg/t5093.check b/test/files/neg/t5093.check index daba46001153..b794f023e56d 100644 --- a/test/files/neg/t5093.check +++ b/test/files/neg/t5093.check @@ -4,7 +4,4 @@ t5093.scala:2: error: illegal cyclic reference involving type C t5093.scala:2: error: cyclic aliasing or subtyping involving type C def f[C[X] <: C[X]](l: C[_]) = l.x ^ -t5093.scala:2: error: C does not take type parameters - def f[C[X] <: C[X]](l: C[_]) = l.x - ^ -three errors found +two errors found diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index 64b68db242eb..3cfbda651639 100644 --- a/test/files/run/analyzerPlugins.check +++ b/test/files/run/analyzerPlugins.check @@ -14,18 +14,18 @@ canAdaptAnnotations(Trees$Select, ?) [1] canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1] canAdaptAnnotations(Trees$Select, Boolean) [1] canAdaptAnnotations(Trees$Select, String @testAnn) [1] -canAdaptAnnotations(Trees$TypeTree, ?) [8] +canAdaptAnnotations(Trees$TypeTree, ?) [7] canAdaptAnnotations(Trees$Typed, ?) 
[3] canAdaptAnnotations(Trees$Typed, Any) [1] canAdaptAnnotations(Trees$Typed, Int) [1] lub(List(Int @testAnn, Int)) [1] -pluginsPt(?, Trees$Annotated) [7] +pluginsPt(?, Trees$Annotated) [6] pluginsPt(?, Trees$Apply) [11] pluginsPt(?, Trees$ApplyImplicitView) [2] pluginsPt(?, Trees$Block) [4] pluginsPt(?, Trees$ClassDef) [2] pluginsPt(?, Trees$DefDef) [14] -pluginsPt(?, Trees$Ident) [51] +pluginsPt(?, Trees$Ident) [43] pluginsPt(?, Trees$If) [2] pluginsPt(?, Trees$Literal) [16] pluginsPt(?, Trees$New) [6] @@ -37,7 +37,7 @@ pluginsPt(?, Trees$This) [13] pluginsPt(?, Trees$TypeApply) [3] pluginsPt(?, Trees$TypeBoundsTree) [2] pluginsPt(?, Trees$TypeDef) [1] -pluginsPt(?, Trees$TypeTree) [32] +pluginsPt(?, Trees$TypeTree) [25] pluginsPt(?, Trees$Typed) [1] pluginsPt(?, Trees$ValDef) [13] pluginsPt(Any, Trees$Literal) [2] @@ -118,20 +118,20 @@ pluginsTyped(=> String @testAnn, Trees$Select) [1] pluginsTyped(A, Trees$Apply) [1] pluginsTyped(A, Trees$Ident) [2] pluginsTyped(A, Trees$This) [1] -pluginsTyped(A, Trees$TypeTree) [4] +pluginsTyped(A, Trees$TypeTree) [2] pluginsTyped(A.super.type, Trees$Super) [1] pluginsTyped(A.this.type, Trees$This) [11] pluginsTyped(Any, Trees$TypeTree) [1] pluginsTyped(AnyRef, Trees$Select) [4] pluginsTyped(Array[Any], Trees$ArrayValue) [1] pluginsTyped(Boolean @testAnn, Trees$Select) [1] -pluginsTyped(Boolean @testAnn, Trees$TypeTree) [3] +pluginsTyped(Boolean @testAnn, Trees$TypeTree) [2] pluginsTyped(Boolean(false), Trees$Literal) [1] pluginsTyped(Boolean, Trees$Apply) [1] -pluginsTyped(Boolean, Trees$Select) [3] +pluginsTyped(Boolean, Trees$Select) [2] pluginsTyped(Char('c'), Trees$Literal) [2] pluginsTyped(Double, Trees$Apply) [3] -pluginsTyped(Double, Trees$Select) [6] +pluginsTyped(Double, Trees$Select) [4] pluginsTyped(Int @testAnn, Trees$TypeTree) [2] pluginsTyped(Int @testAnn, Trees$Typed) [2] pluginsTyped(Int(0), Trees$Literal) [2] @@ -141,8 +141,8 @@ pluginsTyped(Int(2), Trees$Literal) [1] pluginsTyped(Int, Trees$Apply) [1] pluginsTyped(Int, Trees$Ident) [1] pluginsTyped(Int, Trees$If) [1] -pluginsTyped(Int, Trees$Select) [12] -pluginsTyped(Int, Trees$TypeTree) [10] +pluginsTyped(Int, Trees$Select) [10] +pluginsTyped(Int, Trees$TypeTree) [8] pluginsTyped(List[Any], Trees$Apply) [1] pluginsTyped(List[Any], Trees$Select) [1] pluginsTyped(List[Any], Trees$TypeTree) [2] @@ -158,14 +158,14 @@ pluginsTyped(String("str"), Trees$Literal) [1] pluginsTyped(String("two"), Trees$Literal) [2] pluginsTyped(String, Trees$Apply) [2] pluginsTyped(String, Trees$Block) [2] -pluginsTyped(String, Trees$Select) [7] -pluginsTyped(String, Trees$TypeTree) [6] +pluginsTyped(String, Trees$Select) [4] +pluginsTyped(String, Trees$TypeTree) [5] pluginsTyped(Unit, Trees$Apply) [2] pluginsTyped(Unit, Trees$Assign) [1] pluginsTyped(Unit, Trees$Block) [4] pluginsTyped(Unit, Trees$If) [1] pluginsTyped(Unit, Trees$Literal) [5] -pluginsTyped(Unit, Trees$TypeTree) [2] +pluginsTyped(Unit, Trees$TypeTree) [1] pluginsTyped([A](xs: A*)List[A], Trees$Select) [1] pluginsTyped([T <: Int]=> Int, Trees$Select) [1] pluginsTyped([T0]()T0, Trees$Select) [1] @@ -183,7 +183,7 @@ pluginsTyped(testAnn, Trees$Apply) [6] pluginsTyped(testAnn, Trees$Ident) [6] pluginsTyped(testAnn, Trees$New) [6] pluginsTyped(testAnn, Trees$This) [1] -pluginsTyped(testAnn, Trees$TypeTree) [2] +pluginsTyped(testAnn, Trees$TypeTree) [1] pluginsTyped(testAnn.super.type, Trees$Super) [1] pluginsTyped(type, Trees$Select) [1] pluginsTypedReturn(return f, String) [1] diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala 
b/test/junit/scala/reflect/internal/PrintersTest.scala index c7cfe0dfbb72..55aa82cceb36 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -332,7 +332,7 @@ class BasePrintTest { @Test def testFunc2 = assertResultCode( code = "val sum: Seq[Int] => Int = _ reduceLeft (_+_)")( parsedCode = "val sum: _root_.scala.Function1[Seq[Int], Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))", - typedCode = "val sum: _root_.scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1: Seq[Int]) => x$1.reduceLeft[Int](((x$2: Int, x$3: Int) => x$2.+(x$3))))") + typedCode = "val sum: scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1: Seq[Int]) => x$1.reduceLeft[Int](((x$2: Int, x$3: Int) => x$2.+(x$3))))") @Test def testFunc3 = assertResultCode( code = "List(1, 2, 3) map (_ - 1)")( From 786c6570172fccc2fd7e76bc3b0f211e061f63f8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 22 May 2019 14:04:43 +1000 Subject: [PATCH 16/17] PipelineMain: add test, make it more testable and less buggy. - Allow a user-specified reporter - funnel javac errors through it - funnel PipelineMain's logging through it, too. - Use a separate FileManager for each javac invocation to avoid an apparent race condition. - Expose config knobs programmatically rather than only through system properties. --- .../scala/tools/nsc/PipelineMain.scala | 206 ++++++++------ .../scala/tools/nsc/DeterminismTest.scala | 36 +-- test/junit/scala/tools/nsc/FileUtils.scala | 39 +++ .../scala/tools/nsc/PipelineMainTest.scala | 260 ++++++++++++++++++ 4 files changed, 430 insertions(+), 111 deletions(-) create mode 100644 test/junit/scala/tools/nsc/FileUtils.scala create mode 100644 test/junit/scala/tools/nsc/PipelineMainTest.scala diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 24f8f8881771..463e8ef3b488 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -17,32 +17,28 @@ import java.lang.Thread.UncaughtExceptionHandler import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} import java.time.Instant -import java.util.Collections -import java.util.concurrent.atomic.AtomicInteger +import java.util.{Collections, Locale} +import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} -import javax.tools.ToolProvider +import javax.tools.Diagnostic.Kind +import javax.tools.{Diagnostic, DiagnosticListener, JavaFileObject, ToolProvider} -import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.collection.JavaConverters._ import scala.collection.{immutable, mutable, parallel} import scala.concurrent._ import scala.concurrent.duration.Duration import scala.reflect.internal.pickling.PickleBuffer -import scala.reflect.internal.util.FakePos -import scala.reflect.io.RootPath +import scala.reflect.internal.util.{BatchSourceFile, FakePos, Position} +import scala.reflect.io.{PlainNioFile, RootPath} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success, Try} -import PipelineMain.{BuildStrategy, Pipeline, Traditional} - -class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { - private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") - private val pickleCache: Path = {
if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") - else { - Paths.get(pickleCacheConfigured) - } - } +import PipelineMain.{Pipeline, Traditional} + +class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.PipelineSettings) { + import pipelineSettings._ + private val pickleCache: Path = configuredPickleCache.getOrElse(Files.createTempDirectory("scala.picklecache")) private def cachePath(file: Path): Path = { val newExtension = if (useJars) ".jar" else "" changeExtension(pickleCache.resolve("./" + file).normalize(), newExtension) @@ -120,7 +116,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } - def writeDotFile(dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { + def writeDotFile(logDir: Path, dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { val builder = new java.lang.StringBuilder() builder.append("digraph projects {\n") for ((p, deps) <- dependsOn) { @@ -133,17 +129,16 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } builder.append("}\n") - val path = Paths.get("projects.dot") + val path = logDir.resolve("projects.dot") Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) - println("Wrote project dependency graph to: " + path.toAbsolutePath) + reporter.echo("Wrote project dependency graph to: " + path.toAbsolutePath) } private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) def process(): Boolean = { - println(s"parallelism = $parallelism, strategy = $strategy") - - reporter = new ConsoleReporter(new Settings(scalacError)) + reporter = createReporter(new Settings(scalacError)) + reporter.echo(s"parallelism = $parallelism, strategy = $strategy") def commandFor(argFileArg: Path): Task = { val ss = new Settings(scalacError) @@ -152,6 +147,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } val projects: List[Task] = argFiles.toList.map(commandFor) + if (reporter.hasErrors) return false + val numProjects = projects.size val produces = mutable.LinkedHashMap[Path, Task]() for (p <- projects) { @@ -168,27 +165,27 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p) && Files.exists(p)).toSet if (strategy != Traditional) { - val exportTimer = new Timer - exportTimer.start() - for (entry <- externalClassPath) { - val extracted = cachePath(entry) - val sourceTimeStamp = Files.getLastModifiedTime(entry) - if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { - // println(s"Skipped export of pickles from $entry to $extracted (up to date)") - } else { - PickleExtractor.process(entry, extracted) - Files.setLastModifiedTime(extracted, sourceTimeStamp) - println(s"Exported pickles from $entry to $extracted") - Files.setLastModifiedTime(extracted, sourceTimeStamp) + if (stripExternalClassPath) { + val exportTimer = new Timer + exportTimer.start() + for (entry <- externalClassPath) { + val extracted = cachePath(entry) + val sourceTimeStamp = Files.getLastModifiedTime(entry) + if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { + // println(s"Skipped export of pickles from $entry to $extracted (up to date)") + } else { + PickleExtractor.process(entry, extracted) + Files.setLastModifiedTime(extracted, sourceTimeStamp) + reporter.echo(s"Exported pickles from 
$entry to $extracted") + Files.setLastModifiedTime(extracted, sourceTimeStamp) + } + strippedAndExportedClassPath(entry) = extracted } - strippedAndExportedClassPath(entry) = extracted + exportTimer.stop() + reporter.echo(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms") } - exportTimer.stop() - println(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms") } - writeDotFile(dependsOn) - val timer = new Timer timer.start() @@ -197,9 +194,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val allFutures = projects.flatMap(_.futures) val count = allFutures.size val counter = new AtomicInteger(count) + val failed = new AtomicBoolean(false) val handler = (a: Try[_]) => a match { case f @ Failure(_) => - done.complete(f) + if (failed.compareAndSet(false, true)) { + done.complete(f) + } case Success(_) => val remaining = counter.decrementAndGet() if (remaining == 0) done.success(()) @@ -213,28 +213,28 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val allFutures: immutable.Seq[Future[_]] = projects.flatMap(_.futures) val numAllFutures = allFutures.size val awaitAllFutures: Future[_] = awaitAll(allFutures) - val numTasks = awaitAllFutures var lastNumCompleted = allFutures.count(_.isCompleted) while (true) try { Await.result(awaitAllFutures, Duration(60, "s")) timer.stop() val numCompleted = allFutures.count(_.isCompleted) - println(s"PROGRESS: $numCompleted / $numAllFutures") + reporter.echo(s"PROGRESS: $numCompleted / $numAllFutures") return } catch { case _: TimeoutException => val numCompleted = allFutures.count(_.isCompleted) if (numCompleted == lastNumCompleted) { - println(s"STALLED: $numCompleted / $numAllFutures") - println("Outline/Scala/Javac") + reporter.echo(s"STALLED: $numCompleted / $numAllFutures") + reporter.echo("Outline/Scala/Javac") projects.map { p => def toX(b: Future[_]): String = b.value match { case None => "-"; case Some(Success(_)) => "x"; case Some(Failure(_)) => "!" } val s = List(p.outlineDoneFuture, p.groupsDoneFuture, p.javaDoneFuture).map(toX).mkString(" ") - println(s + " " + p.label) + reporter.echo(s + " " + p.label) } } else { - println(s"PROGRESS: $numCompleted / $numAllFutures") + reporter.echo(s"PROGRESS: $numCompleted / $numAllFutures") + lastNumCompleted = numCompleted } } } @@ -246,7 +246,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy _ <- depsReady _ <- { val isLeaf = !dependedOn.contains(p) - if (isLeaf) { + if (isLeaf && useTraditionalForLeaf) { p.outlineDone.complete(Success(())) p.fullCompile() } else @@ -274,16 +274,17 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (parallelism == 1) { val criticalPath = projects.maxBy(_.regularCriticalPathMs) - println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") } else - println(f" Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f" Wall Clock: ${timer.durationMs}%.0f ms") case Traditional => projects.foreach { p => val f1 = Future.traverse(dependsOn.getOrElse(p, Nil))(_.t.javaDone.future) val f2 = f1.flatMap { _ => p.outlineDone.complete(Success(())) p.fullCompile() - Future.traverse(p.groups)(_.done.future).map(_ => p.javaCompile()) + val eventualUnits: Future[List[Unit]] = Future.traverse(p.groups)(_.done.future) + eventualUnits.map(_ => p.javaCompile()) } f2.onComplete { _ => p.compiler.close() } } @@ -298,24 +299,28 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } if (parallelism == 1) { val maxFullCriticalPath: Double = projects.map(_.fullCriticalPathMs).max - println(f"Critical path: $maxFullCriticalPath%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f"Critical path: $maxFullCriticalPath%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") } else { - println(f"Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f"Wall Clock: ${timer.durationMs}%.0f ms") } } - writeChromeTrace(projects) + logDir.foreach { dir => + Files.createDirectories(dir) + writeDotFile(dir, dependsOn) + writeChromeTrace(dir, projects) + } deleteTempPickleCache() true } private def deleteTempPickleCache(): Unit = { - if (pickleCacheConfigured == null) { + if (configuredPickleCache.isEmpty) { AbstractFile.getDirectory(pickleCache.toFile).delete() } } - private def writeChromeTrace(projects: List[Task]) = { + private def writeChromeTrace(logDir: Path, projects: List[Task]) = { val trace = new java.lang.StringBuilder() trace.append("""{"traceEvents": [""") val sb = new mutable.StringBuilder(trace) @@ -344,9 +349,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") - val traceFile = Paths.get(s"build-${label}.trace") + val traceFile = logDir.resolve(s"build-${label}.trace") Files.write(traceFile, trace.toString.getBytes()) - println("Chrome trace written to " + traceFile.toAbsolutePath) + reporter.echo("Chrome trace written to " + traceFile.toAbsolutePath) } case class Group(files: List[String]) { @@ -355,7 +360,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } private case class Task(argsFile: Path, command: CompilerCommand, files: List[String]) { - val label = argsFile.toString.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") + val label = argsFile.toString.replaceAll(".*/target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") override def toString: String = argsFile.toString def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() private def expand(s: command.settings.PathSetting): List[Path] = { @@ -380,8 +385,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependency.t.outlineDone.future - val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") - val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") if (cacheMacro) command.settings.YcacheMacroClassLoader.value = "always" if (cachePlugin) @@ -391,6 +394,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy command.settings.YpickleJava.value = true } + val groupSize = Integer.getInteger("scala.pipeline.group.size", 128) + val groups: List[Group] = { val 
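The task `label` above is derived purely from the args-file path: everything up to and including the last `target/` segment is dropped, then `dir/name.args` becomes `dir:name`. A worked example with a hypothetical path (a remainder without a slash, such as `compile.args`, is left untouched by the second step):

    object LabelDemo extends App {
      val argsFile = "/code/app/target/common/compile.args" // hypothetical layout
      val label = argsFile
        .replaceAll(".*/target/", "")              // "common/compile.args"
        .replaceAll("""(.*)/(.*).args""", "$1:$2") // "common:compile"
      println(label)                               // prints common:compile
    }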
isScalaLibrary = files.exists(_.endsWith("Predef.scala")) if (isScalaLibrary) { @@ -398,7 +403,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else { command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value val length = files.length - val groups = (length.toDouble / 128).toInt.max(1) + val groups = (length.toDouble / groupSize).toInt.max(1) files.grouped((length.toDouble / groups).ceil.toInt.max(1)).toList.map(Group(_)) } } @@ -438,8 +443,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy throw t } - def fullCompile(): Unit = { + command.settings.Youtline.value = false command.settings.stopAfter.value = Nil command.settings.Ymacroexpand.value = command.settings.MacroExpand.Normal @@ -451,9 +456,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy group.timer.start() val compiler2 = newCompiler(command.settings) try { - val run2 = new compiler2.Run() - run2 compile group.files - compiler2.reporter.finish() + try { + val run2 = new compiler2.Run() + run2 compile group.files + compiler2.reporter.finish() + } finally { + group.timer.stop() + log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") + } if (compiler2.reporter.hasErrors) { group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) } else { @@ -461,9 +471,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } finally { compiler2.close() - group.timer.stop() } - log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") } } } @@ -521,19 +529,42 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy log("javac: start") javaTimer.start() javaDone.completeWith(Future { - val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) - val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) + val opts: java.util.List[String] = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) + val compiler = ToolProvider.getSystemJavaCompiler + val listener = new DiagnosticListener[JavaFileObject] { + override def report(diagnostic: Diagnostic[_ <: JavaFileObject]): Unit = { + val msg = diagnostic.getMessage(Locale.getDefault) + val source: JavaFileObject = diagnostic.getSource + val path = Paths.get(source.toUri) + val sourceFile = new BatchSourceFile(new PlainNioFile(path)) + val position = Position.range(sourceFile, diagnostic.getStartPosition.toInt, diagnostic.getPosition.toInt, diagnostic.getEndPosition.toInt) + diagnostic.getKind match { + case Kind.ERROR => reporter.error(position, msg) + case Kind.WARNING | Kind.MANDATORY_WARNING => reporter.warning(position, msg) + case Kind.NOTE => reporter.info(position, msg, true) + case Kind.OTHER => reporter.echo(position, msg) + } + } + } + def classFilesInTarget = Files.walk(Paths.get(command.settings.outdir.value)).iterator().asScala.filter(_.getFileName.toString.endsWith(".class")).toList + log("Class files before: " + classFilesInTarget.mkString(", ")) + val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) + val compileTask = compiler.getTask(null, fileManager, listener, 
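With the group size now read from `-Dscala.pipeline.group.size` (defaulting to 128), the number of source groups is `length / groupSize` rounded down (but at least one), and the files are split into that many roughly equal chunks. A small sketch of the same arithmetic on assumed inputs:

    object GroupingDemo extends App {
      val groupSize = 128 // default of -Dscala.pipeline.group.size
      def split(files: List[String]): List[List[String]] = {
        val length = files.length
        val groups = (length.toDouble / groupSize).toInt.max(1)
        files.grouped((length.toDouble / groups).ceil.toInt.max(1)).toList
      }
      println(split(List.tabulate(300)(i => s"F$i.scala")).map(_.size)) // List(150, 150)
      println(split(List.tabulate(100)(i => s"F$i.scala")).map(_.size)) // List(100)
    }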
opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) compileTask.setProcessors(Collections.emptyList()) - compileTask.call() - javaTimer.stop() - log(f"javac: done ${javaTimer.durationMs}%.0f ms") + if (compileTask.call()) { + javaTimer.stop() + log(f"javac: done ${javaTimer.durationMs}%.0f ms ") + } else { + javaTimer.stop() + log(f"javac: error ${javaTimer.durationMs}%.0f ms ") + } () }) } else { javaDone.complete(Success(())) } } - def log(msg: String): Unit = println(this.label + ": " + msg) + def log(msg: String): Unit = reporter.echo(this.label + ": " + msg) } final class Timer() { @@ -579,24 +610,39 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy object PipelineMain { sealed abstract class BuildStrategy - /** Begin compilation as soon as the pickler phase is complete on all dependencies. */ + /** Transport pickles as an input to downstream compilation. */ case object Pipeline extends BuildStrategy /** Emit class files before triggering downstream compilation */ case object Traditional extends BuildStrategy - def main(args: Array[String]): Unit = { + case class PipelineSettings(label: String, parallelism: Int, strategy: BuildStrategy, useJars: Boolean, + configuredPickleCache: Option[Path], cacheMacro: Boolean, cachePlugin: Boolean, + stripExternalClassPath: Boolean, useTraditionalForLeaf: Boolean, logDir: Option[Path], + createReporter: (Settings => Reporter)) + def defaultSettings: PipelineSettings = { val strategies = List(Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") + val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") + val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") + val stripExternalClassPath = java.lang.Boolean.getBoolean("scala.pipeline.strip.external.classpath") + val useTraditionalForLeaf = java.lang.Boolean.getBoolean("scala.pipeline.use.traditional.for.leaf") + val configuredPickleCache = Option(System.getProperty("scala.pipeline.picklecache")).map(Paths.get(_)) + val logDir = Paths.get(".") + new PipelineSettings("1", parallelism, strategy, useJars, configuredPickleCache, + cacheMacro, cachePlugin, stripExternalClassPath, useTraditionalForLeaf, Some(logDir), new ConsoleReporter(_)) + } + + def main(args: Array[String]): Unit = { val argFiles: Seq[Path] = args match { case Array(path) if Files.isDirectory(Paths.get(path)) => Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList case _ => args.map(Paths.get(_)) } - val main = new PipelineMainClass("1", parallelism, strategy, argFiles, useJars) + val main = new PipelineMainClass(argFiles, defaultSettings) val result = main.process() if (!result) System.exit(1) @@ -608,10 +654,12 @@ object PipelineMain { //object PipelineMainTest { // def main(args: Array[String]): Unit = { // var i = 0 -// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList -// for (_ <- 1 to 2; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) { +//// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList +// val 
argsFiles = List(Paths.get("/Users/jz/code/guardian-frontend/common/target/compile.args")) +// val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") +// for (_ <- 1 to 20; n <- List(parallel.availableProcessors); strat <- List(OutlineTypePipeline)) { // i += 1 -// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = false) +// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars) // println(s"====== ITERATION $i=======") // val result = main.process() // if (!result) diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index 9f79709cca58..deadd7fa218d 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -1,20 +1,18 @@ package scala.tools.nsc -import java.io.{File, OutputStreamWriter} +import java.io.OutputStreamWriter import java.nio.charset.Charset import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} -import java.util import javax.tools.ToolProvider import org.junit.Test -import scala.collection.JavaConverters.{asScalaIteratorConverter, seqAsJavaListConverter} -import scala.collection.immutable +import scala.collection.JavaConverters.seqAsJavaListConverter import scala.language.implicitConversions import scala.reflect.internal.util.{BatchSourceFile, SourceFile} -import scala.reflect.io.PlainNioFile import scala.tools.nsc.reporters.StoreReporter +import FileUtils._ class DeterminismTest { @Test def testLambdaLift(): Unit = { @@ -328,7 +326,7 @@ class DeterminismTest { val recompileOutput = Files.createTempDirectory("recompileOutput") copyRecursive(referenceOutput, recompileOutput) compile(recompileOutput, permutation) - assert(diff(referenceOutput, recompileOutput), s"Difference detected between recompiling $permutation Run:\njardiff -r $referenceOutput $recompileOutput\n") + assertDirectorySame(referenceOutput, recompileOutput, permutation.toString) deleteRecursive(recompileOutput) } deleteRecursive(referenceOutput) @@ -336,30 +334,4 @@ class DeterminismTest { } def permutationsWithSubsets[A](as: List[A]): List[List[A]] = as.permutations.toList.flatMap(_.inits.filter(_.nonEmpty)).distinct - - private def diff(dir1: Path, dir2: Path): Boolean = { - def allFiles(dir: Path) = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).sortBy(_._1.toString) - - val dir1Files = allFiles(dir1) - val dir2Files = allFiles(dir2) - val identical = dir1Files.corresponds(dir2Files) { - case ((rel1, file1), (rel2, file2)) => - rel1 == rel2 && java.util.Arrays.equals(Files.readAllBytes(file1), Files.readAllBytes(file2)) - } - identical - } - private def deleteRecursive(f: Path) = new PlainNioFile(f).delete() - private def copyRecursive(src: Path, dest: Path): Unit = { - class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { - override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { - Files.createDirectories(dest.resolve(src.relativize(dir))) - super.preVisitDirectory(dir, attrs) - } - override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { - Files.copy(file, dest.resolve(src.relativize(file))) - super.visitFile(file, attrs) - } - } - Files.walkFileTree(src, new CopyVisitor(src, dest)) - } } diff --git a/test/junit/scala/tools/nsc/FileUtils.scala b/test/junit/scala/tools/nsc/FileUtils.scala new 
file mode 100644 index 000000000000..03befd661cab --- /dev/null +++ b/test/junit/scala/tools/nsc/FileUtils.scala @@ -0,0 +1,39 @@ +package scala.tools.nsc + +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.reflect.io.PlainNioFile + +object FileUtils { + def assertDirectorySame(dir1: Path, dir2: Path, dir2Label: String): Unit = { + assert(FileUtils.diff(dir1, dir2), s"Difference detected between recompiling $dir2Label Run:\njardiff -r $dir1 $dir2\n") + } + def diff(dir1: Path, dir2: Path): Boolean = { + def allFiles(dir: Path) = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).sortBy(_._1.toString) + + val dir1Files = allFiles(dir1) + val dir2Files = allFiles(dir2) + val identical = dir1Files.corresponds(dir2Files) { + case ((rel1, file1), (rel2, file2)) => + rel1 == rel2 && java.util.Arrays.equals(Files.readAllBytes(file1), Files.readAllBytes(file2)) + } + identical + } + + def deleteRecursive(f: Path) = new PlainNioFile(f).delete() + def copyRecursive(src: Path, dest: Path): Unit = { + class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.createDirectories(dest.resolve(src.relativize(dir))) + super.preVisitDirectory(dir, attrs) + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.copy(file, dest.resolve(src.relativize(file))) + super.visitFile(file, attrs) + } + } + Files.walkFileTree(src, new CopyVisitor(src, dest)) + } +} diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala new file mode 100644 index 000000000000..48e27aaac98a --- /dev/null +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -0,0 +1,260 @@ +package scala.tools.nsc + +import java.io.{File, IOException} +import java.nio.charset.Charset +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} + +import org.junit.{After, Before, Test} + +import scala.collection.JavaConverters._ +import scala.collection.mutable +import FileUtils._ +import scala.tools.nsc.PipelineMain._ +import scala.tools.nsc.reporters.{ConsoleReporter, StoreReporter} + +class PipelineMainTest { + private var base: Path = _ + + // Enables verbose output to console to help understand what the test is doing. 
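The `FileUtils` object added above centralizes the directory comparison and recursive copy/delete helpers so that `DeterminismTest` and the new `PipelineMainTest` can share them. A rough usage sketch with hypothetical temporary directories (not part of the patch):

    import java.nio.file.Files
    import scala.tools.nsc.FileUtils._

    object FileUtilsDemo extends App {
      val reference = Files.createTempDirectory("reference")
      val recompile = Files.createTempDirectory("recompile")
      // ... compile the same sources into both directories ...
      // Fails with a `jardiff -r` hint in the assertion message if any .class file differs.
      assertDirectorySame(reference, recompile, "recompile")
      deleteRecursive(recompile)
      deleteRecursive(reference)
    }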
+ private val debug = false + private var deleteBaseAfterTest = true + + @Before def before(): Unit = { + base = Files.createTempDirectory("pipelineBase") + } + + @After def after(): Unit = { + if (base != null && !debug && deleteBaseAfterTest) { + deleteRecursive(base) + } + } + + private def projectsBase = createDir(base, "projects") + + @Test def pipelineMainBuildsSeparate(): Unit = { + check(allBuilds.map(_.projects)) + } + + @Test def pipelineMainBuildsCombined(): Unit = { + check(List(allBuilds.flatMap(_.projects))) + } + + private val pipelineSettings = PipelineMain.defaultSettings.copy( + useJars = true, + parallelism = java.lang.Runtime.getRuntime.availableProcessors, + cacheMacro = true, + cachePlugin = true, + stripExternalClassPath = true, + useTraditionalForLeaf = true, + createReporter = ((s: Settings) => if (debug) new ConsoleReporter(s) else new StoreReporter()) + ) + + private def check(projectss: List[List[Build#Project]], altStrategies: List[BuildStrategy] = List(Pipeline)): Unit = { + def build(strategy: BuildStrategy): Unit = { + for (projects <- projectss) { + val argsFiles = projects.map(_.argsFile(Nil)) + val main = new PipelineMainClass(argsFiles, pipelineSettings.copy(strategy = strategy, logDir = Some(base.resolve(strategy.toString)))) + assert(main.process()) + } + } + build(Traditional) + + val reference = snapshotClasses(Traditional) + clean() + for (strategy <- altStrategies) { + build(strategy) + val recompiled = snapshotClasses(strategy) + // Bytecode should be identical regardless of compilation strategy. + deleteBaseAfterTest = false + assertDirectorySame(reference, recompiled, strategy.toString) + deleteBaseAfterTest = true + } + } + + private lazy val allBuilds = List(m1, b2, b3, b4) + + private lazy val m1: Build = { + val build = new Build(projectsBase, "m1") + val macroProject = build.project("p1") + macroProject.withSource("m1/p1/Macro.scala")( + """ + |package m1.p1 + |import reflect.macros.blackbox.Context, language.experimental._ + |object Macro { + | def m: Unit = macro impl + | def impl(c: Context): c.Tree = { + | import c.universe._ + | q"()" + | } + |} + """.stripMargin) + val internalMacroClient = build.project("internalMacroClient") + internalMacroClient.scalacOptions ++= List("-Ymacro-classpath", macroProject.out.toString) + internalMacroClient.classpath += macroProject.out + internalMacroClient.withSource("m2/p2/InternalClient.scala")( + """ + |package m1.p2 + |class InternalClient { m1.p1.Macro.m } + """.stripMargin) + build + } + + private lazy val b2: Build = { + val build = new Build(projectsBase, "b1") + val p1 = build.project("p1") + val m1P1 = m1.project("p1") + p1.classpath += m1P1.out + p1.scalacOptions ++= List("-Ymacro-classpath", m1P1.out.toString) + p1.withSource("b1/p1/ExternalClient.scala")( + """ + |package b2.p2 + |class ExternalClient { m1.p1.Macro.m } + """.stripMargin) + build + } + + private lazy val b3: Build = { + val build = new Build(projectsBase, "b3") + val p1 = build.project("p1") + p1.withSource("b3/p1/JavaDefined.java")( + """ + |package b3.p1; + |public class JavaDefined { + | ScalaJoint id(T t) { return new ScalaJoint(); } + |} + """.stripMargin) + p1.withSource("b3/p1/ScalaJoint.scala")( + """ + |package b3.p1 + |class ScalaJoint[T] { + | def foo: Unit = new JavaDefined[String] + |} + """.stripMargin) + val p2 = build.project("p2") + p2.classpath += p1.out + p2.withSource("b3/p2/JavaClient.java")( + """ + |package b3.p2; + |public class JavaClient { + | b3.p1.JavaDefined test() { return null; } + |} + 
""".stripMargin) + p2.withSource("b3/p2/ScalaClient.scala")( + """ + |package b3.p2 + |class ScalaClient { + | def test(): b3.p1.JavaDefined[String] = null; + |} + """.stripMargin) + build + } + + private lazy val b4: Build = { + val build = new Build(projectsBase, "b4") + val b3P1 = b3.project("p1") + val p2 = build.project("p2") + p2.classpath += b3P1.out + p2.withSource("b4/p2/JavaClient.java")( + """ + |package b4.p2; + |public class JavaClient { + | b3.p1.JavaDefined test() { return null; } + |} + """.stripMargin) + p2.withSource("b4/p2/ScalaClient.scala")( + """ + |package b4.p2 + |class ScalaClient { + | def test(): b3.p1.JavaDefined[String] = null; + |} + """.stripMargin) + build + } + + final class Build(base: Path, name: String) { + + val buildBase = createDir(base, name) + val scalacOptions = mutable.ListBuffer[String]() + final class Project(val name: String) { + def fullName: String = Build.this.name + "." + name + val base = createDir(buildBase, name) + val out = createDir(base, "target") + val src = createDir(base, "src") + val scalacOptions = mutable.ListBuffer[String]() + scalacOptions += "-usejavacp" + val classpath = mutable.ListBuffer[Path]() + val sources = mutable.ListBuffer[Path]() + def withSource(relativePath: String)(code: String): this.type = { + val srcFile = src.resolve(relativePath) + Files.createDirectories(srcFile.getParent) + Files.write(srcFile, code.getBytes(Charset.defaultCharset())) + sources += srcFile + this + } + def argsFile(extraOpts: List[String]): Path = { + val cp = if (classpath.isEmpty) Nil else List("-cp", classpath.mkString(File.pathSeparator)) + val printArgs = if (debug) List("-Xprint-args", "-") else Nil + val entries = List( + Build.this.scalacOptions.toList, + scalacOptions.toList, + extraOpts, + printArgs, + List("-d", out.toString) ::: cp ::: sources.toList.map(_.toString) + ).flatten + Files.write(out.resolve(fullName + ".args"), entries.asJava) + } + } + private val projectsMap = mutable.LinkedHashMap[String, Project]() + def projects: List[Project] = projectsMap.valuesIterator.toList + def project(name: String): Project = { + projectsMap.getOrElseUpdate(name, new Project(name)) + } + } + + private def clean(): Unit = { + class CleanVisitor() extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (dir.getFileName.toString == "target") { + deleteRecursive(dir) + Files.createDirectories(dir) + FileVisitResult.SKIP_SUBTREE + } else super.preVisitDirectory(dir, attrs) + } + } + Files.walkFileTree(projectsBase, new CleanVisitor()) + } + private def snapshotClasses(strategy: BuildStrategy): Path = { + val src = projectsBase + val dest = createDir(base, strategy.toString + "/classes") + class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.createDirectories(dest.resolve(src.relativize(dir))) + super.preVisitDirectory(dir, attrs) + } + + override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = { + val destDir = dest.resolve(src.relativize(dir)) + val listing = Files.list(destDir) + try { + if (!listing.iterator().hasNext) + Files.delete(destDir) + } finally { + listing.close() + } + super.postVisitDirectory(dir, exc) + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.copy(file, dest.resolve(src.relativize(file))) + super.visitFile(file, attrs) + } + } + Files.walkFileTree(src, 
new CopyVisitor(src, dest))
+    dest
+  }
+
+  private def createDir(dir: Path, s: String): Path = {
+    val subDir = dir.resolve(s)
+    Files.createDirectories(subDir)
+  }
+}

From 3cfb31cad259f668060abb5e8668f476b1d059c5 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Wed, 22 May 2019 14:04:56 +1000
Subject: [PATCH 17/17] Add experimental support for outline typing

In this new mode, the RHS of definitions is only typechecked if the
definition lacks an explicit type ascription, or if it may contain a
super call that is compiled to a trait super accessor. Refer to the
new test case for a motivating example.
---
 .../scala/tools/nsc/PipelineMain.scala        | 73 ++++++++++++++++++-
 .../tools/nsc/typechecker/Analyzer.scala      | 12 +--
 .../scala/tools/nsc/typechecker/Typers.scala  | 12 ++-
 .../scala/tools/nsc/PipelineMainTest.scala    | 43 ++++++++++-
 4 files changed, 127 insertions(+), 13 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala
index 463e8ef3b488..917973b2fde0 100644
--- a/src/compiler/scala/tools/nsc/PipelineMain.scala
+++ b/src/compiler/scala/tools/nsc/PipelineMain.scala
@@ -34,7 +34,7 @@ import scala.tools.nsc.io.AbstractFile
 import scala.tools.nsc.reporters.{ConsoleReporter, Reporter}
 import scala.tools.nsc.util.ClassPath
 import scala.util.{Failure, Success, Try}
-import PipelineMain.{Pipeline, Traditional}
+import PipelineMain.{OutlineTypePipeline, Pipeline, Traditional}
 
 class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.PipelineSettings) {
   import pipelineSettings._
@@ -239,6 +239,43 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe
       }
     }
     strategy match {
+      case OutlineTypePipeline =>
+        projects.foreach { p: Task =>
+          val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task))
+          val f = for {
+            _ <- depsReady
+            _ <- {
+              p.outlineCompile()
+              p.outlineDone.future
+            }
+            _ <- {
+              p.fullCompile()
+              Future.traverse(p.groups)(_.done.future)
+            }
+          } yield {
+            p.javaCompile()
+          }
+          f.onComplete { _ => p.compiler.close() }
+        }
+
+        awaitDone()
+
+        for (p <- projects) {
+          val dependencies = dependsOn(p).map(_.t)
+
+          def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max
+
+          val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs)
+          p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs
+          p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs)
+          p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum
+        }
+
+        if (parallelism == 1) {
+          val criticalPath = projects.maxBy(_.regularCriticalPathMs)
+          reporter.echo(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms.
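The outline mode introduced by this patch only typechecks a right-hand side when it has to: bodies of members with an explicit result type can be skipped during the outline pass, unless they might require a trait super accessor to be recorded in the pickle. A rough illustration of the distinction (example code, not from the patch):

    trait Logging {
      def log(msg: String): Unit = println(msg)
    }

    class Service extends Logging {
      // Explicit result type: the signature is known without the body,
      // so the outline pass can skip typechecking this RHS.
      def answer: Int = { log("computing"); 41 + 1 }

      // No ascription: the RHS must be typechecked to infer the result type String.
      def banner = "service " + answer
    }

    trait Wrapper extends Logging {
      // Explicit result type, but the super call inside a trait needs a super
      // accessor that must be pickled, so this body cannot be skipped either.
      override def log(msg: String): Unit = super.log("[wrapper] " + msg)
    }

The last case is what the conservative `canSkipRhs` check in the Typers change below guards against.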
Wall Clock: ${timer.durationMs}%.0f ms") + } else + reporter.echo(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => projects.foreach { p => val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) @@ -332,7 +369,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe def projectEvents(p: Task): List[String] = { val events = List.newBuilder[String] if (p.outlineTimer.durationMicros > 0d) { - val desc = "parser-to-pickler" + val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" events += durationEvent(p.label, desc, p.outlineTimer) events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) } @@ -398,7 +435,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) - if (isScalaLibrary) { + if (strategy != OutlineTypePipeline || isScalaLibrary) { Group(files) :: Nil } else { command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value @@ -443,6 +480,32 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe throw t } + def outlineCompile(): Unit = { + outlineTimer.start() + try { + log("scalac outline: start") + command.settings.Youtline.value = true + command.settings.stopAfter.value = List("pickler") + command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + val run1 = new compiler.Run() + run1 compile files + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) + outlineTimer.stop() + reporter.finish() + if (reporter.hasErrors) { + log("scalac outline: failed") + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + log(f"scala outline: done ${outlineTimer.durationMs}%.0f ms") + outlineDone.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } + } + def fullCompile(): Unit = { command.settings.Youtline.value = false command.settings.stopAfter.value = Nil @@ -610,6 +673,8 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe object PipelineMain { sealed abstract class BuildStrategy + /** Outline type check sources to compute type signatures an input to downstream compilation. Compile sources (optionally */ + case object OutlineTypePipeline extends BuildStrategy /** Transport pickles as an input to downstream compilation. 
*/ case object Pipeline extends BuildStrategy @@ -621,7 +686,7 @@ object PipelineMain { stripExternalClassPath: Boolean, useTraditionalForLeaf: Boolean, logDir: Option[Path], createReporter: (Settings => Reporter)) def defaultSettings: PipelineSettings = { - val strategies = List(Pipeline, Traditional) + val strategies = List(OutlineTypePipeline, Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b068e43d1ad4..bc5ffd0ccd7c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -112,11 +112,13 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) - warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) - new checkUnused(typer).apply(unit) + if (!settings.Youtline.value) { + for (workItem <- unit.toCheck) workItem() + if (settings.warnUnusedImport) + warnUnusedImports(unit) + if (settings.warnUnused.isSetByUser) + new checkUnused(typer).apply(unit) + } } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0a3002d04c80..79086ab03bb9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5945,14 +5945,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 - case _ => typed(tree, mode, pt) + case _ => if (canSkipRhs(tree)) EmptyTree else typed(tree, mode, pt) } } final def lookupTransformed(tree: Tree): Option[Tree] = if (phase.erasedTypes) None // OPT save the hashmap lookup in erasure type and beyond else transformed remove tree - } + private final def canSkipRhs(tree: Tree) = settings.Youtline.value && !tree.exists { + case Super(qual, mix) => + // conservative approximation of method bodies that may give rise to super accessors which must be + // stored in pickle. 
+        context.owner.enclClass.isTrait || mix != tpnme.EMPTY
+      case _ => false
+    }
+  }
 
   /** Finish computation of param aliases after typechecking is completed */
   final def finishComputeParamAlias(): Unit = {
@@ -5981,6 +5988,7 @@
     }
     superConstructorCalls.clear()
   }
+  }
 
 trait TypersStats {
diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala
index 48e27aaac98a..e3e6a81fc921 100644
--- a/test/junit/scala/tools/nsc/PipelineMainTest.scala
+++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala
@@ -40,6 +40,12 @@ class PipelineMainTest {
     check(List(allBuilds.flatMap(_.projects)))
   }
 
+  @Test def pipelineMainBuildsJavaAccessor(): Unit = {
+    // Tests the special case in Typer#canSkipRhs that makes outline typing descend into
+    // method bodies that might give rise to super accessors.
+    check(List(b5SuperAccessor.projects), altStrategies = List(OutlineTypePipeline))
+  }
+
   private val pipelineSettings = PipelineMain.defaultSettings.copy(
     useJars = true,
     parallelism = java.lang.Runtime.getRuntime.availableProcessors,
@@ -50,7 +56,7 @@ class PipelineMainTest {
     createReporter = ((s: Settings) => if (debug) new ConsoleReporter(s) else new StoreReporter())
   )
 
-  private def check(projectss: List[List[Build#Project]], altStrategies: List[BuildStrategy] = List(Pipeline)): Unit = {
+  private def check(projectss: List[List[Build#Project]], altStrategies: List[BuildStrategy] = List(Pipeline, OutlineTypePipeline)): Unit = {
     def build(strategy: BuildStrategy): Unit = {
       for (projects <- projectss) {
         val argsFiles = projects.map(_.argsFile(Nil))
@@ -72,7 +78,7 @@
     }
   }
 
-  private lazy val allBuilds = List(m1, b2, b3, b4)
+  private lazy val allBuilds = List(m1, b2, b3, b4, b5SuperAccessor)
 
   private lazy val m1: Build = {
     val build = new Build(projectsBase, "m1")
@@ -172,6 +178,39 @@
     build
   }
 
+  private lazy val b5SuperAccessor: Build = {
+    val build = new Build(projectsBase, "b5")
+    val p1 = build.project("p1")
+    p1.withSource("b5/p1/JavaProtectedMethod.java")(
+      """
+        |package b5.p1;
+        |public class JavaProtectedMethod {
+        |  protected String foo() { return "JavaProtectedMethod.foo"; }
+        |}
+      """.stripMargin)
+    p1.withSource("b5/p1/NeedSuperAccessor.scala")(
+      """
+        |package b5.p1
+        |trait NeedSuperAccessor extends JavaProtectedMethod {
+        |  protected override def foo = "NeedSuperAccessor.foo"
+        |  class Inner {
+        |    def test: Any = {
+        |      NeedSuperAccessor.super[JavaProtectedMethod].foo
+        |    }
+        |  }
+        |}
+      """.stripMargin)
+    val p2 = build.project("p2")
+    p2.classpath += p1.out
+    p2.withSource("b5/p2/ScalaSub.scala")(
+      """
+        |package b5.p2
+        |class ScalaSub extends b5.p1.NeedSuperAccessor {
+        |}
+      """.stripMargin)
+    build
+  }
+
   final class Build(base: Path, name: String) {
 
     val buildBase = createDir(base, name)