diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Changes.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Changes.scala
index 53944b8a40..c516567e03 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Changes.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Changes.scala
@@ -88,7 +88,10 @@ final case class ModifiedNames(names: Set[UsedName]) {
     names.flatMap(n => n.scopes.asScala.map(n.name -> _))
 
   def isModified(usedName: UsedName): Boolean =
-    usedName.scopes.asScala.exists(scope => lookupMap.contains(usedName.name -> scope))
+    usedName.scopes.asScala.exists(scope => isModifiedRaw(usedName.name, scope))
+
+  def isModifiedRaw(name: String, scope: UseScope): Boolean =
+    lookupMap.contains(name -> scope)
 
   override def toString: String =
     s"ModifiedNames(changes = ${names.mkString(", ")})"
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
index 7e70bf7ce1..89cc667aa7 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
@@ -16,19 +16,20 @@ package inc
 import java.io.File
 import java.nio.file.{ Files, Path, Paths }
 import java.util.{ EnumSet, UUID }
-import java.util.concurrent.atomic.{ AtomicBoolean }
+import java.util.concurrent.atomic.AtomicBoolean
 import sbt.internal.inc.Analysis.{ LocalProduct, NonLocalProduct }
 import sbt.internal.inc.JavaInterfaceUtil.EnrichOption
 import sbt.util.{ InterfaceUtil, Level, Logger }
 import sbt.util.InterfaceUtil.{ jo2o, t2 }
+
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 import scala.util.control.NonFatal
 import xsbti.{ FileConverter, Position, Problem, Severity, UseScope, VirtualFile, VirtualFileRef }
 import xsbt.api.{ APIUtil, HashAPI, NameHashing }
 import xsbti.api._
 import xsbti.compile.{
   AnalysisContents,
-  AnalysisStore => XAnalysisStore,
   CompileAnalysis,
   CompileProgress,
   DependencyChanges,
@@ -36,6 +37,7 @@ import xsbti.compile.{
   MiniOptions,
   MiniSetup,
   Output,
+  AnalysisStore => XAnalysisStore,
   ClassFileManager => XClassFileManager
 }
 import xsbti.compile.analysis.{ ReadStamps, Stamp => XStamp }
@@ -626,7 +628,7 @@ private final class AnalysisCallback(
   private[this] val objectApis = new TrieMap[String, ApiInfo]
   private[this] val classPublicNameHashes = new TrieMap[String, Array[NameHash]]
   private[this] val objectPublicNameHashes = new TrieMap[String, Array[NameHash]]
-  private[this] val usedNames = new RelationBuilder[String, UsedName]
+  private[this] val usedNames = new TrieMap[String, mutable.Set[UsedName]]
   private[this] val unreporteds = new TrieMap[VirtualFileRef, ConcurrentLinkedQueue[Problem]]
   private[this] val reporteds = new TrieMap[VirtualFileRef, ConcurrentLinkedQueue[Problem]]
   private[this] val mainClasses = new TrieMap[VirtualFileRef, ConcurrentLinkedQueue[String]]
@@ -862,10 +864,12 @@ private final class AnalysisCallback(
     ()
   }
 
-  def usedName(className: String, name: String, useScopes: EnumSet[UseScope]) =
-    usedNames.synchronized {
-      usedNames(className) = UsedName.make(name, useScopes)
-    }
+  def usedName(className: String, name: String, useScopes: EnumSet[UseScope]) = {
+    usedNames
+      .getOrElseUpdate(className, ConcurrentHashMap.newKeySet[UsedName].asScala)
+      .add(UsedName.make(name, useScopes))
+    ()
+  }
 
   override def enabled(): Boolean = options.enabled
 
@@ -905,11 +909,15 @@ private final class AnalysisCallback(
   }
 
   def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten
+
   def addCompilation(base: Analysis): Analysis =
     base.copy(compilations = base.compilations.add(compilation))
+
   def addUsedNames(base: Analysis): Analysis = {
-    assert(base.relations.names.size == 0)
-    base.copy(relations = base.relations.addUsedNames(usedNames.result()))
+    assert(base.relations.names.isEmpty)
+    base.copy(
+      relations = base.relations.addUsedNames(UsedNames.fromMultiMap(usedNames))
+    )
   }
 
   private def companionsWithHash(className: String): (Companions, HashAPI.Hash, HashAPI.Hash) = {
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/MemberRefInvalidator.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/MemberRefInvalidator.scala
index 31699749f6..43a3b26dbb 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/MemberRefInvalidator.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/MemberRefInvalidator.scala
@@ -58,7 +58,7 @@ private[inc] class MemberRefInvalidator(log: Logger, logRecompileOnMacro: Boolea
   private final val NoInvalidation = (_: String) => Set.empty[String]
   def get(
       memberRef: Relation[String, String],
-      usedNames: Relation[String, UsedName],
+      usedNames: Relations.UsedNames,
       apiChange: APIChange,
       isScalaClass: String => Boolean
   ): String => Set[String] = apiChange match {
@@ -121,7 +121,7 @@ private[inc] class MemberRefInvalidator(log: Logger, logRecompileOnMacro: Boolea
   }
 
   private class NameHashFilteredInvalidator(
-      usedNames: Relation[String, UsedName],
+      usedNames: Relations.UsedNames,
       memberRef: Relation[String, String],
       modifiedNames: ModifiedNames,
       isScalaClass: String => Boolean
@@ -131,17 +131,18 @@ private[inc] class MemberRefInvalidator(log: Logger, logRecompileOnMacro: Boolea
       val dependent = memberRef.reverse(to)
       filteredDependencies(dependent)
     }
+
    private def filteredDependencies(dependent: Set[String]): Set[String] = {
      dependent.filter {
        case from if isScalaClass(from) =>
-          val affectedNames = usedNames.forward(from).filter(modifiedNames.isModified)
-          if (affectedNames.isEmpty) {
+          if (!usedNames.hasAffectedNames(modifiedNames, from)) {
            log.debug(
              s"None of the modified names appears in source file of $from. This dependency is not being considered for invalidation."
            )
            false
          } else {
-            log.debug(s"The following modified names cause invalidation of $from: $affectedNames")
+            val affectedNames = usedNames.affectedNames(modifiedNames, from)
+            log.debug(s"The following modified names cause invalidation of $from: [$affectedNames]")
            true
          }
        case from =>
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/Relations.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/Relations.scala
index 4a2b807ce6..4309ef3918 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/Relations.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/Relations.scala
@@ -80,8 +78,6 @@ trait Relations {
   /** Internal source dependencies that depend on external source file `dep`. This includes both direct and inherited dependencies. */
   def usesExternal(className: String): Set[String]
 
-  private[inc] def usedNames(className: String): Set[UsedName]
-
   /**
    * Records that the file `src` generates products `products`, has internal dependencies `internalDeps`,
    * has external dependencies `externalDeps` and library dependencies `libraryDeps`.
@@ -140,7 +138,7 @@ trait Relations {
       deps: Iterable[(VirtualFileRef, String, XStamp)]
   ): Relations
 
-  private[inc] def addUsedNames(data: Relation[String, UsedName]): Relations
+  private[inc] def addUsedNames(data: Relations.UsedNames): Relations
 
   /** Concatenates the two relations. Acts naively, i.e., doesn't internalize external deps on added files. */
   def ++(o: Relations): Relations
@@ -267,7 +265,7 @@ trait Relations {
   /**
    * Relation between source files and _unqualified_ term and type names used in given source file.
    */
-  private[inc] def names: Relation[String, UsedName]
+  private[inc] def names: Relations.UsedNames
 
   private[inc] def copy(
       srcProd: Relation[VirtualFileRef, VirtualFileRef] = srcProd,
@@ -276,12 +274,13 @@ trait Relations {
       internalDependencies: InternalDependencies = internalDependencies,
       externalDependencies: ExternalDependencies = externalDependencies,
       classes: Relation[VirtualFileRef, String] = classes,
-      names: Relation[String, UsedName] = names,
+      names: Relations.UsedNames = names,
       productClassName: Relation[String, String] = productClassName,
   ): Relations
 }
 
 object Relations {
+  type UsedNames = inc.UsedNames
 
   /** Tracks internal and external source dependencies for a specific dependency type, such as direct or inherited.*/
   private[inc] final class ClassDependencies(
@@ -318,7 +317,7 @@ object Relations {
       internalDependencies = InternalDependencies.empty,
       externalDependencies = ExternalDependencies.empty,
       classes = Relation.empty,
-      names = Relation.empty,
+      names = UsedNames.fromMultiMap(Map.empty),
       productClassName = Relation.empty
     )
 
@@ -329,7 +328,7 @@ object Relations {
      internalDependencies: InternalDependencies,
      externalDependencies: ExternalDependencies,
      classes: Relation[VirtualFileRef, String],
-      names: Relation[String, UsedName],
+      names: Relations.UsedNames,
      productClassName: Relation[String, String]
  ): Relations =
    new MRelationsNameHashing(
@@ -348,6 +347,7 @@ object Relations {
       external: Relation[String, String]
   ): ClassDependencies =
     new ClassDependencies(internal, external)
+
 }
 
 private[inc] object DependencyCollection {
@@ -368,7 +368,7 @@ private[inc] object DependencyCollection {
 private[inc] object InternalDependencies {
 
   /**
-   * Constructs an empty `InteralDependencies`
+   * Constructs an empty `InternalDependencies`
   */
   def empty = InternalDependencies(Map.empty)
 }
@@ -470,7 +470,7 @@ private class MRelationsNameHashing(
     val internalDependencies: InternalDependencies,
     val externalDependencies: ExternalDependencies,
     val classes: Relation[VirtualFileRef, String],
-    val names: Relation[String, UsedName],
+    val names: Relations.UsedNames,
     val productClassName: Relation[String, String]
 ) extends Relations {
   def allSources: collection.Set[VirtualFileRef] = srcProd._1s
@@ -501,8 +501,6 @@ private class MRelationsNameHashing(
   def externalDeps(className: String): Set[String] = externalClassDep.forward(className)
   def usesExternal(className: String): Set[String] = externalClassDep.reverse(className)
 
-  private[inc] def usedNames(className: String): Set[UsedName] = names.forward(className)
-
   def addProducts(src: VirtualFileRef, products: Iterable[VirtualFileRef]): Relations =
     new MRelationsNameHashing(
       srcProd ++ products.map(p => (src, p)),
@@ -563,7 +561,7 @@ private class MRelationsNameHashing(
       productClassName,
     )
 
-  private[inc] def addUsedNames(data: Relation[String, UsedName]): Relations = {
+  private[inc] def addUsedNames(data: Relations.UsedNames): Relations = {
     new MRelationsNameHashing(
       srcProd,
       libraryDep,
@@ -571,7 +569,7 @@ private class MRelationsNameHashing(
       internalDependencies,
       externalDependencies,
       classes,
-      names = if (names.forwardMap.isEmpty) data else names ++ data,
+      names = if (names.isEmpty) data else names ++ data,
       productClassName,
     )
   }
@@ -626,7 +624,7 @@ private class MRelationsNameHashing(
       internalDependencies: InternalDependencies = internalDependencies,
       externalDependencies: ExternalDependencies = externalDependencies,
       classes: Relation[VirtualFileRef, String] = classes,
-      names: Relation[String, UsedName] = names,
+      names: Relations.UsedNames = names,
       productClassName: Relation[String, String] = productClassName,
   ): Relations = new MRelationsNameHashing(
     srcProd,
@@ -657,6 +655,13 @@ private class MRelationsNameHashing(
     if (r.forwardMap.isEmpty) "Relation [ ]"
     else r.all.toSeq.map(kv => line_s(kv._1, kv._2)).sorted.mkString("Relation [\n", "", "]")
   }
+  def usedNames_s(r: Relations.UsedNames) = {
+    if (r.isEmpty) "UsedNames [ ]"
+    else {
+      val all = r.iterator.flatMap { case (a, bs) => bs.iterator.map(b => (a, b)) }
+      all.map(kv => line_s(kv._1, kv._2)).toSeq.sorted.mkString("UsedNames [\n", "", "]")
+    }
+  }
 
   override def toString: String = {
     def deps_s(m: Map[_, Relation[_, _]]) =
@@ -671,7 +676,7 @@ private class MRelationsNameHashing(
        |  internalDependencies: ${deps_s(internalDependencies.dependencies)}
        |  externalDependencies: ${deps_s(externalDependencies.dependencies)}
        |  class names: ${relation_s(classes)}
-       |  used names: ${relation_s(names)}
+       |  used names: ${usedNames_s(names)}
        |  product class names: ${relation_s(productClassName)}
     """.trim.stripMargin
   }
diff --git a/internal/zinc-core/src/main/scala/sbt/internal/inc/UsedName.scala b/internal/zinc-core/src/main/scala/sbt/internal/inc/UsedName.scala
index bb75608995..756d205d7e 100644
--- a/internal/zinc-core/src/main/scala/sbt/internal/inc/UsedName.scala
+++ b/internal/zinc-core/src/main/scala/sbt/internal/inc/UsedName.scala
@@ -11,16 +11,17 @@
 
 package sbt.internal.inc
 
+import java.{ util => ju }
+import scala.{ collection => sc }
 import xsbti.compile.{ UsedName => XUsedName }
 import xsbti.UseScope
 
-case class UsedName private (name: String, scopes: java.util.EnumSet[UseScope]) extends XUsedName {
+case class UsedName private (name: String, scopes: ju.EnumSet[UseScope]) extends XUsedName {
   override def getName: String = name
-  override def getScopes: java.util.EnumSet[UseScope] = scopes
+  override def getScopes: ju.EnumSet[UseScope] = scopes
 }
 
 object UsedName {
-
   def apply(name: String, scopes: Iterable[UseScope] = Nil): UsedName = {
     val useScopes = java.util.EnumSet.noneOf(classOf[UseScope])
     scopes.foreach(useScopes.add)
@@ -39,3 +40,114 @@ object UsedName {
     name
   }
 }
+
+sealed abstract class UsedNames private {
+  def isEmpty: Boolean
+  def toMultiMap: sc.Map[String, sc.Set[UsedName]]
+
+  def ++(other: UsedNames): UsedNames
+  def --(classes: Iterable[String]): UsedNames
+  def iterator: Iterator[(String, sc.Set[UsedName])]
+
+  def hasAffectedNames(modifiedNames: ModifiedNames, from: String): Boolean
+  def affectedNames(modifiedNames: ModifiedNames, from: String): String
+}
+
+object UsedNames {
+  def fromJavaMap(map: ju.Map[String, Schema.UsedNames]) = JavaUsedNames(map)
+  def fromMultiMap(map: sc.Map[String, sc.Set[UsedName]]) = ScalaUsedNames(map)
+
+  final case class ScalaUsedNames(map: sc.Map[String, sc.Set[UsedName]]) extends UsedNames {
+    def isEmpty = map.isEmpty
+    def toMultiMap = map
+    def ++(other: UsedNames) = fromMultiMap(map ++ other.iterator)
+    def --(classes: Iterable[String]) = fromMultiMap(map -- classes)
+    def iterator = map.iterator
+    def hasAffectedNames(modifiedNames: ModifiedNames, from: String): Boolean =
+      map(from).iterator.exists(modifiedNames.isModified)
+    def affectedNames(modifiedNames: ModifiedNames, from: String): String =
+      map(from).iterator.filter(modifiedNames.isModified).mkString(", ")
+  }
+
+  final case class JavaUsedNames(map: ju.Map[String, Schema.UsedNames]) extends UsedNames {
+
+    import scala.collection.JavaConverters._
+
+    private def fromUseScope(useScope: Schema.UseScope, id: Int): UseScope = useScope match {
+      case Schema.UseScope.DEFAULT      => UseScope.Default
+      case Schema.UseScope.IMPLICIT     => UseScope.Implicit
+      case Schema.UseScope.PATMAT       => UseScope.PatMatTarget
+      case Schema.UseScope.UNRECOGNIZED =>
+        sys.error(s"Unrecognized ${classOf[Schema.UseScope].getName} with value `$id`.")
+    }
+
+    private def fromUsedName(usedName: Schema.UsedName): UsedName = {
+      val name = usedName.getName.intern() // ?
+      val useScopes = ju.EnumSet.noneOf(classOf[UseScope])
+      val len = usedName.getScopesCount
+      for (i <- 0 to len - 1)
+        useScopes.add(fromUseScope(usedName.getScopes(i), usedName.getScopesValue(i)))
+      UsedName.make(name, useScopes)
+    }
+
+    private def fromUsedNamesMap(map: ju.Map[String, Schema.UsedNames]) =
+      for ((k, used) <- map.asScala)
+        yield k -> used.getUsedNamesList.asScala.iterator.map(fromUsedName).toSet
+
+    lazy val toMultiMap: sc.Map[String, sc.Set[UsedName]] = fromUsedNamesMap(map)
+    private lazy val convert: UsedNames = fromMultiMap(toMultiMap)
+
+    def isEmpty = map.isEmpty
+
+    def ++(other: UsedNames) = convert ++ other
+
+    def --(classes: Iterable[String]) = convert -- classes
+
+    def iterator = convert.iterator
+
+    def hasAffectedNames(modifiedNames: ModifiedNames, from: String): Boolean = {
+      val usedNames = map.get(from)
+      var i = 0
+      val n = usedNames.getUsedNamesCount
+      while (i < n) {
+        val usedName = usedNames.getUsedNames(i)
+        val name = usedName.getName
+        var i2 = 0
+        val n2 = usedName.getScopesCount
+        while (i2 < n2) {
+          val scope = fromUseScope(usedName.getScopes(i2), usedName.getScopesValue(i2))
+          if (modifiedNames.isModifiedRaw(name, scope)) {
+            return true
+          }
+          i2 += 1
+        }
+        i += 1
+      }
+      false
+    }
+
+    def affectedNames(modifiedNames: ModifiedNames, from: String): String = {
+      val b = new StringBuilder()
+      val usedNames = map.get(from)
+      var first = true
+      var i = 0
+      val n = usedNames.getUsedNamesCount
+      while (i < n) {
+        val usedName = usedNames.getUsedNames(i)
+        val name = usedName.getName
+        var i2 = 0
+        val n2 = usedName.getScopesCount
+        while (i2 < n2) {
+          val scope = fromUseScope(usedName.getScopes(i2), usedName.getScopesValue(i2))
+          if (modifiedNames.isModifiedRaw(name, scope)) {
+            if (first) first = false else b.append(", ")
+            b.append(name)
+          }
+          i2 += 1
+        }
+        i += 1
+      }
+      b.toString
+    }
+  }
+}
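For orientation, a REPL-style sketch (not part of the patch) of how the new UsedNames abstraction above is queried during invalidation. The class name "a.B" and the names "foo"/"bar" are invented for illustration, and the snippet assumes the sbt.internal.inc internals introduced above are on the classpath.

import sbt.internal.inc.{ ModifiedNames, UsedName, UsedNames }
import xsbti.UseScope

// One API change produced a modified name: "foo" in the Default scope.
val modified = ModifiedNames(Set(UsedName("foo", List(UseScope.Default))))

// Class "a.B" uses "foo" in the Default scope and "bar" in the Implicit scope.
val used = UsedNames.fromMultiMap(
  Map(
    "a.B" -> Set(
      UsedName("foo", List(UseScope.Default)),
      UsedName("bar", List(UseScope.Implicit))
    )
  )
)

used.hasAffectedNames(modified, "a.B") // true: "a.B" uses a modified name in a matching scope
used.affectedNames(modified, "a.B")    // the affected names, rendered for MemberRefInvalidator's debug log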
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufReaders.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufReaders.scala
index 35b62f0207..c649fc63c2 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufReaders.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufReaders.scala
@@ -12,8 +12,7 @@
 package sbt.internal.inc.binary.converters
 
 import java.nio.file.{ Path, Paths }
-import java.util
-import java.util.{ List => JList, Map => JMap }
+import java.util.{ List => JList, Map => JMap, HashMap => JHashMap }
 import sbt.internal.inc.Relations.ClassDependencies
 import sbt.internal.inc._
 import sbt.internal.inc.binary.converters.ProtobufDefaults.EmptyLazyCompanions
@@ -25,6 +24,7 @@ import sbt.internal.inc.binary.converters.ProtobufDefaults.Feedback.StringToExce
 import sbt.internal.inc.binary.converters.ProtobufDefaults.Feedback.{ Readers => ReadersFeedback }
 import sbt.internal.inc.binary.converters.ProtobufDefaults.{ Classes, ReadersConstants }
 import sbt.internal.util.Relation
+
 import scala.collection.JavaConverters._
 import xsbti.api._
 import ProtobufDefaults.{ MissingInt, MissingString }
@@ -629,7 +629,8 @@ final class ProtobufReaders(mapper: ReadMapper, currentVersion: Schema.Version)
   }
 
   def fromAnalyzedClass(
-      shouldStoreApis: Boolean
+      shouldStoreApis: Boolean,
+      stringTable: StringTable
   )(analyzedClass: Schema.AnalyzedClass): AnalyzedClass = {
     def fromCompanions(companions: Schema.Companions): Companions = {
       def expected(msg: String) = ReadersFeedback.expected(msg, Classes.Companions)
@@ -643,7 +644,7 @@ final class ProtobufReaders(mapper: ReadMapper, currentVersion: Schema.Version)
     }
 
     def fromNameHash(nameHash: Schema.NameHash): NameHash = {
-      val name = nameHash.getName.intern()
+      val name = stringTable.lookupOrEnter(nameHash.getName)
       val hash = nameHash.getHash
       val scope = fromUseScope(nameHash.getScope, nameHash.getScopeValue)
       NameHash.of(name, scope, hash)
@@ -701,33 +702,6 @@ final class ProtobufReaders(mapper: ReadMapper, currentVersion: Schema.Version)
       new ClassDependencies(internal, external)
     }
 
-    def fromUsedName(usedName: Schema.UsedName): UsedName = {
-      val name = usedName.getName.intern()
-      val useScopes = util.EnumSet.noneOf(classOf[UseScope])
-      val len = usedName.getScopesCount
-      for {
-        i <- 0 to len - 1
-      } {
-        useScopes.add(fromUseScope(usedName.getScopes(i), usedName.getScopesValue(i)))
-      }
-      UsedName.make(name, useScopes)
-    }
-
-    def fromUsedNamesMap(
-        map: java.util.Map[String, Schema.UsedNames]
-    ): Relation[String, UsedName] = {
-      val builder = new RelationBuilder[String, UsedName]
-      for ((k, used) <- map.asScala) {
-        val usedNames = used.getUsedNamesList.asScala
-        if (!usedNames.isEmpty) {
-          for (schemaUsedName <- usedNames) {
-            builder(k) = fromUsedName(schemaUsedName)
-          }
-        }
-      }
-      builder.result()
-    }
-
     def expected(msg: String) = ReadersFeedback.expected(msg, Classes.Relations)
 
     val srcProd = fromMap(relations.getSrcProdMap, stringToSource, stringToProd)
@@ -746,7 +720,7 @@ final class ProtobufReaders(mapper: ReadMapper, currentVersion: Schema.Version)
     val classes = fromMap(relations.getClassesMap, stringToSource, stringId)
     val productClassName = fromMap(relations.getProductClassNameMap, stringId, stringId)
 
-    val names = fromUsedNamesMap(relations.getNamesMap)
+    val names = UsedNames.fromJavaMap(relations.getNamesMap)
     val internal = InternalDependencies(
       Map(
         DependencyContext.DependencyByMemberRef -> memberRef.internal,
@@ -774,13 +748,14 @@ final class ProtobufReaders(mapper: ReadMapper, currentVersion: Schema.Version)
   }
 
   def fromApis(shouldStoreApis: Boolean)(apis: Schema.APIs): APIs = {
+    val stringTable = new StringTable
     val internal = apis.getInternalMap.asScala.iterator.map {
-      case (k, v) => k -> fromAnalyzedClass(shouldStoreApis)(v)
+      case (k, v) => k -> fromAnalyzedClass(shouldStoreApis, stringTable)(v)
    }.toMap
 
    val external = apis.getExternalMap.asScala.iterator.map {
-      case (k, v) => k -> fromAnalyzedClass(shouldStoreApis)(v)
+      case (k, v) => k -> fromAnalyzedClass(shouldStoreApis, stringTable)(v)
    }.toMap
 
    APIs(internal = internal, external = external)
  }
@@ -825,4 +800,13 @@ final class ProtobufReaders(mapper: ReadMapper, currentVersion: Schema.Version)
     else s"The mini setup from format ${version} could not be read.".!!
     (analysis, miniSetup, version)
   }
+  private class StringTable {
+    private val strings = new JHashMap[String, String]()
+    def lookupOrEnter(string: String): String = {
+      strings.putIfAbsent(string, string) match {
+        case null => string
+        case v    => v
+      }
+    }
+  }
 }
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
index 9208df10f4..046b3b37a0 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/binary/converters/ProtobufWriters.scala
@@ -653,10 +653,8 @@ final class ProtobufWriters(mapper: WriteMapper) {
     builder.build
   }
 
-  def toUsedNamesMap(
-      relation: Relation[String, UsedName]
-  ): Iterator[(String, Schema.UsedNames)] = {
-    relation.forwardMap.iterator.map {
+  def toUsedNamesMap(map: Relations.UsedNames): Iterator[(String, Schema.UsedNames)] = {
+    map.iterator.map {
       case (k, names) =>
         val builder = Schema.UsedNames.newBuilder
         names.foreach(name => builder.addUsedNames(toUsedName(name)))
diff --git a/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/RelationsTextFormat.scala b/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/RelationsTextFormat.scala
index ddf93e7e5a..46aea4c035 100644
--- a/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/RelationsTextFormat.scala
+++ b/internal/zinc-persist/src/main/scala/sbt/internal/inc/text/RelationsTextFormat.scala
@@ -13,7 +13,7 @@
 package sbt.internal.inc.text
 
 import java.io.{ BufferedReader, Writer }
-import sbt.internal.inc.{ ExternalDependencies, InternalDependencies, Relations, UsedName }
+import sbt.internal.inc.{ ExternalDependencies, InternalDependencies, Relations, UsedNames }
 import sbt.internal.util.Relation
 import xsbti.VirtualFileRef
 import xsbti.api.DependencyContext._
@@ -26,94 +26,79 @@ trait RelationsTextFormat extends FormatCommons {
 
   private case class Descriptor[A, B](
       header: String,
-      selectCorresponding: Relations => Relation[A, B],
+      selectCorresponding: Relations => scala.collection.Map[A, scala.collection.Set[B]],
       keyMapper: Mapper[A],
       valueMapper: Mapper[B]
   )
 
+  private def descriptor[A, B](
+      header: String,
+      rels: Relations => Relation[A, B],
+      keyMapper: Mapper[A],
+      valueMapper: Mapper[B]
+  ) =
+    Descriptor(header, rels.andThen(_.forwardMap), keyMapper, valueMapper)
+
   private def stringsDescriptor(header: String, rels: Relations => Relation[String, String]) =
-    Descriptor(header, rels, Mapper.forString, Mapper.forString)
+    descriptor(header, rels, Mapper.forString, Mapper.forString)
 
   private val allRelations: List[Descriptor[_, _]] = {
     List(
-      Descriptor("products", _.srcProd, sourcesMapper, productsMapper),
-      Descriptor("library dependencies", _.libraryDep, sourcesMapper, binariesMapper),
-      Descriptor("library class names", _.libraryClassName, binariesMapper, Mapper.forString),
+      descriptor("products", _.srcProd, sourcesMapper, productsMapper),
+      descriptor("library dependencies", _.libraryDep, sourcesMapper, binariesMapper),
+      descriptor("library class names", _.libraryClassName, binariesMapper, Mapper.forString),
       stringsDescriptor("member reference internal dependencies", _.memberRef.internal),
       stringsDescriptor("member reference external dependencies", _.memberRef.external),
       stringsDescriptor("inheritance internal dependencies", _.inheritance.internal),
       stringsDescriptor("inheritance external dependencies", _.inheritance.external),
       stringsDescriptor("local internal inheritance dependencies", _.localInheritance.internal),
       stringsDescriptor("local external inheritance dependencies", _.localInheritance.external),
-      Descriptor("class names", _.classes, sourcesMapper, Mapper.forString),
-      Descriptor("used names", _.names, Mapper.forString, Mapper.forUsedName),
+      descriptor("class names", _.classes, sourcesMapper, Mapper.forString),
+      Descriptor("used names", _.names.toMultiMap, Mapper.forString, Mapper.forUsedName),
       stringsDescriptor("product class names", _.productClassName)
     )
   }
 
   protected object RelationsF {
-
     def write(out: Writer, relations: Relations): Unit = {
-      def writeRelation[A, B](relDesc: Descriptor[A, B], relations: Relations): Unit = {
-        // This ordering is used to persist all values in order. Since all values will be
-        // persisted using their string representation, it makes sense to sort them using
-        // their string representation.
-        val toStringOrdA = new Ordering[A] {
-          def compare(a: A, b: A) = relDesc.keyMapper.write(a) compare relDesc.keyMapper.write(b)
-        }
-        val toStringOrdB = new Ordering[B] {
-          def compare(a: B, b: B) =
-            relDesc.valueMapper.write(a) compare relDesc.valueMapper.write(b)
-        }
-        val header = relDesc.header
-        val rel = relDesc.selectCorresponding(relations)
+      def writeRelation[A, B](relDesc: Descriptor[A, B]): Unit = {
+        import relDesc._
+        val map = selectCorresponding(relations)
         writeHeader(out, header)
-        writeSize(out, rel.size)
+        writeSize(out, map.valuesIterator.flatten.size)
         // We sort for ease of debugging and for more efficient reconstruction when reading.
         // Note that we don't share code with writeMap. Each is implemented more efficiently
         // than the shared code would be, and the difference is measurable on large analyses.
-        rel.forwardMap.toSeq.sortBy(_._1)(toStringOrdA).foreach {
-          case (k, vs) =>
-            val kStr = relDesc.keyMapper.write(k)
-            vs.toSeq.sorted(toStringOrdB) foreach { v =>
-              out.write(kStr); out.write(" -> "); out.write(relDesc.valueMapper.write(v));
-              out.write("\n")
-            }
+        val kvs = map.iterator.map { case (k, vs) => keyMapper.write(k) -> vs }.toSeq.sortBy(_._1)
+        for ((k, vs) <- kvs; v <- vs.iterator.map(valueMapper.write).toSeq.sorted) {
+          out.write(k); out.write(" -> "); out.write(v); out.write("\n")
         }
       }
-
-      allRelations.foreach { relDesc =>
-        writeRelation(relDesc, relations)
-      }
+      allRelations.foreach(writeRelation(_))
     }
 
     def read(in: BufferedReader): Relations = {
-      def readRelation[A, B](relDesc: Descriptor[A, B]): Relation[A, B] = {
-        val expectedHeader = relDesc.header
-        val items =
-          readPairs(in)(expectedHeader, relDesc.keyMapper.read, relDesc.valueMapper.read).toIterator
-        // Reconstruct the forward map. This is more efficient than Relation.empty ++ items.
-        var forward: List[(A, Set[B])] = Nil
-        var currentItem: (A, B) = null
-        var currentKey: A = null.asInstanceOf[A]
-        var currentVals: List[B] = Nil
-        def closeEntry(): Unit = {
-          if (currentKey != null) forward = (currentKey, currentVals.toSet) :: forward
-          currentKey = currentItem._1
-          currentVals = currentItem._2 :: Nil
-        }
+      def readRelation[A, B](relDesc: Descriptor[A, B]): Map[A, Set[B]] = {
+        import relDesc._
+        val items = readPairs(in)(header, keyMapper.read, valueMapper.read).toIterator
+        // Reconstruct the multi-map efficiently, using the writing strategy above
+        val builder = Map.newBuilder[A, Set[B]]
+        var currentKey = null.asInstanceOf[A]
+        var currentVals = Set.newBuilder[B]
+        def closeEntry() = if (currentKey != null) builder += ((currentKey, currentVals.result()))
         while (items.hasNext) {
-          currentItem = items.next()
-          if (currentItem._1 == currentKey) currentVals = currentItem._2 :: currentVals
-          else closeEntry()
+          val (key, value) = items.next()
+          if (key == currentKey) currentVals += value
+          else {
+            closeEntry()
+            currentKey = key
+            currentVals = Set.newBuilder[B] += value
+          }
         }
-        if (currentItem != null) closeEntry()
-        Relation.reconstruct(forward.toMap)
+        closeEntry()
+        builder.result()
       }
-
-      val relations = allRelations.map(rd => readRelation(rd))
-
-      construct(relations)
+      construct(allRelations.map(readRelation(_)))
     }
   }
@@ -121,39 +106,35 @@ trait RelationsTextFormat extends FormatCommons {
   * Reconstructs a Relations from a list of Relation
   * The order in which the relations are read matters and is defined by `existingRelations`.
   */
-  private def construct(relations: List[Relation[_, _]]) =
+  private def construct(relations: List[Map[_, Set[_]]]) =
     relations match {
       case p :: bin :: lcn :: mri :: mre :: ii :: ie :: lii :: lie :: cn :: un :: bcn :: Nil =>
-        val srcProd = p.asInstanceOf[Relation[VirtualFileRef, VirtualFileRef]]
-        val libraryDep = bin.asInstanceOf[Relation[VirtualFileRef, VirtualFileRef]]
-        val libraryClassName = lcn.asInstanceOf[Relation[VirtualFileRef, String]]
-        val classes = cn.asInstanceOf[Relation[VirtualFileRef, String]]
-        val names = un.asInstanceOf[Relation[String, UsedName]]
-        val binaryClassName = bcn.asInstanceOf[Relation[String, String]]
+        def toMultiMap[K, V](m: Map[_, _]): Map[K, Set[V]] = m.asInstanceOf[Map[K, Set[V]]]
+        def toRelation[K, V](m: Map[_, _]): Relation[K, V] = Relation.reconstruct(toMultiMap(m))
         val internal = InternalDependencies(
           Map(
-            DependencyByMemberRef -> mri.asInstanceOf[Relation[String, String]],
-            DependencyByInheritance -> ii.asInstanceOf[Relation[String, String]],
-            LocalDependencyByInheritance -> lii.asInstanceOf[Relation[String, String]]
+            DependencyByMemberRef -> toRelation(mri),
+            DependencyByInheritance -> toRelation(ii),
+            LocalDependencyByInheritance -> toRelation(lii),
          )
        )
        val external = ExternalDependencies(
          Map(
-            DependencyByMemberRef -> mre.asInstanceOf[Relation[String, String]],
-            DependencyByInheritance -> ie.asInstanceOf[Relation[String, String]],
-            LocalDependencyByInheritance -> lie.asInstanceOf[Relation[String, String]]
+            DependencyByMemberRef -> toRelation(mre),
+            DependencyByInheritance -> toRelation(ie),
+            LocalDependencyByInheritance -> toRelation(lie),
          )
        )
        Relations.make(
-          srcProd,
-          libraryDep,
-          libraryClassName,
+          toRelation(p),
+          toRelation(bin),
+          toRelation(lcn),
          internal,
          external,
-          classes,
-          names,
-          binaryClassName
+          toRelation(cn),
+          UsedNames.fromMultiMap(toMultiMap(un)),
+          toRelation(bcn),
        )
      case _ =>
        throw new java.io.IOException(
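A small self-contained sketch of the grouping strategy the rewritten readRelation relies on: because writeRelation emits key -> value pairs sorted by key, consecutive pairs that share a key can be folded straight into a multi-map without building an intermediate Relation. The helper name groupSortedPairs is invented for illustration; the patch inlines this logic.

// Fold key-sorted pairs into a Map[A, Set[B]], closing an entry whenever the key changes.
def groupSortedPairs[A, B](pairs: Iterator[(A, B)]): Map[A, Set[B]] = {
  val builder = Map.newBuilder[A, Set[B]]
  var currentKey: Option[A] = None
  var currentVals = Set.newBuilder[B]
  def closeEntry(): Unit = currentKey.foreach(k => builder += k -> currentVals.result())
  for ((key, value) <- pairs) {
    if (currentKey.contains(key)) currentVals += value
    else {
      closeEntry()
      currentKey = Some(key)
      currentVals = Set.newBuilder[B] += value
    }
  }
  closeEntry()
  builder.result()
}

groupSortedPairs(Iterator("a" -> 1, "a" -> 2, "b" -> 3)) // Map(a -> Set(1, 2), b -> Set(3))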
diff --git a/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisGenerators.scala b/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisGenerators.scala
index 65638b0962..d338ee7122 100644
--- a/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisGenerators.scala
+++ b/internal/zinc-persist/src/test/scala/sbt/inc/AnalysisGenerators.scala
@@ -188,9 +188,9 @@ object AnalysisGenerators {
     for (name <- namesGen; scopes <- Gen.someOf(UseScope.values()))
       yield UsedName(name, UseScope.Default +: scopes)
 
-  def genUsedNames(classNames: Seq[String]): Gen[Relation[String, UsedName]] =
+  def genUsedNames(classNames: Seq[String]): Gen[Relations.UsedNames] =
     for (allNames <- listOfN(classNames.length, containerOf[Set, UsedName](genUsedName())))
-      yield Relation.reconstruct(zipMap(classNames, allNames))
+      yield UsedNames.fromMultiMap(zipMap(classNames, allNames))
 
   def genFileVRef: Gen[VirtualFileRef] =
     genFile.map(x => VirtualFileRef.of(x.toPath.toString))
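Finally, a minimal standalone sketch of the string-deduplication idea behind the new StringTable in ProtobufReaders (the InternTable name and the usage below are illustrative, not from the patch): instead of calling String.intern() on every NameHash name, equal strings read within one fromApis call are resolved to a single shared instance through a plain HashMap, so the table's lifetime ends with the read.

import java.util.{ HashMap => JHashMap }

final class InternTable {
  private val strings = new JHashMap[String, String]()
  // Returns the first instance ever seen for a given string value.
  def lookupOrEnter(s: String): String =
    strings.putIfAbsent(s, s) match {
      case null => s    // first occurrence: store and return the caller's instance
      case seen => seen // later occurrences: reuse the stored instance
    }
}

val table = new InternTable
val a = table.lookupOrEnter(new String("scala.Predef"))
val b = table.lookupOrEnter(new String("scala.Predef"))
assert(a eq b) // duplicate names now share one String instance in the loaded analysis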