- Index: src/reflect/scala/reflect/internal/util/cache/NodeJ.java
- IDEA additional info:
- Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
- <+>UTF-8
- ===================================================================
- --- src/reflect/scala/reflect/internal/util/cache/NodeJ.java (date 1555277045250)
- +++ src/reflect/scala/reflect/internal/util/cache/NodeJ.java (date 1555277045250)
- @@ -0,0 +1,12 @@
- +package scala.reflect.internal.util.cache;
- +
- +public abstract class NodeJ {
- + final String key;
- + volatile NodeJ next;
- +
- + protected NodeJ(String key, NodeJ next) {
- + this.key = key;
- + this.next = next;
- + }
- +}
- +
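Note: NodeJ is deliberately minimal — an interned key plus a volatile link to the next node in the same hash bucket; subclasses add their own payload. A minimal sketch of a concrete subclass, mirroring what the test and benchmark files below do (the name DemoNodeJ is illustrative, not part of the patch):

    import scala.reflect.internal.util.cache.NodeJ

    // carries no payload of its own; the interner only needs key + next
    class DemoNodeJ(key: String, next: NodeJ) extends NodeJ(key, next)
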
- Index: src/reflect/scala/reflect/internal/util/cache/NodeInterner.scala
- IDEA additional info:
- Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
- <+>UTF-8
- ===================================================================
- --- src/reflect/scala/reflect/internal/util/cache/NodeInterner.scala (date 1555283690107)
- +++ src/reflect/scala/reflect/internal/util/cache/NodeInterner.scala (date 1555283690107)
- @@ -0,0 +1,254 @@
- +package scala.reflect.internal.util.cache
- +
- +import java.util.concurrent.atomic.{AtomicInteger, AtomicLong, AtomicReferenceArray}
- +import java.util.concurrent.atomic.AtomicReference
- +
- +import scala.annotation.tailrec
- +
- +abstract class Node(val key: String, @volatile private[cache] var next: Node) extends AnyRef
- +
- +//abstract class NodeInterner[T <: Node] {
- +// final def size = approxSize.get
- +// protected def createNode(key: String, node: T): T
- +//
- +// final def insertOrFind(key: String): T = {
- +// val hash = key.hashCode
- +// var oldTail: Node = null
- +//
- +// var node: Node = null
- +// do {
- +// //deliberately hiding this.data
- +// val data = initial()
- +// val bucket = hash & (data.length - 1)
- +// val head = data.get(bucket)
- +// node = head
- +// while ((node ne null) &&
- +// // we have already checked the tail
- +// (node ne oldTail) &&
- +// // it's not equal. hashCode is cheap for strings and a good discriminant
- +// (node.key.hashCode() != hash || node.key != key))
- +// node = node.next
- +// if (node eq oldTail) node = null
- +// if (node eq null) {
- +// // minor optimisation - we can skip this tail if we have to retry
- +// // if we came to the end of the chain of nodes, we don't need to search the same tail if we fail and try again
- +// oldTail = head
- +// val newNode = createNode(key, head.asInstanceOf[T])
- +// if (data.compareAndSet(bucket, head, newNode) &&
- +// // volatile read to ensure that we have not grown in another thread
- +// (data eq this.data.get)) {
- +// afterInsert(data)
- +// node = newNode
- +// }
- +// } else if (
- +// // volatile read to ensure that we have not grown in another thread
- +// data ne this.data.get) {
- +// node = null
- +// }
- +// } while (node eq null)
- +// node.asInstanceOf[T]
- +// }
- +// final def contains(key: String): Boolean = {
- +// getExistingImpl(key) ne null
- +// }
- +// final def get(key: String): Option[T] = {
- +// Option(getExistingImpl(key).asInstanceOf[T])
- +// }
- +// final def getExistingImpl(key: String): Node = {
- +// val data = initial
- +// val list = data.get(key.hashCode & (data.length() -1))
- +// getExistingImpl(list,key)
- +// }
- +// @tailrec private def getExistingImpl(list: Node, key: String): Node = {
- +// if (list eq null) null
- +// else if (list.key == key) list
- +// else getExistingImpl(list.next, key)
- +// }
- +//
- +// /**
- +// * get the root of data
- +// *
- +// * @return
- +// */
- +// private def initial(): AtomicReferenceArray[Node] = {
- +// //volatile read
- +// var result = data.get()
- +// //null indicates it is in the process of being rehashed
- +// //updates are applied with synchronisation lock on data
- +// if (result eq null) data.synchronized {
- +// //when we have the lock we can guarantee that the other thread's rehash is complete
- +// result = data.get()
- +// assert(result ne null)
- +// }
- +// result
- +// }
- +//
- +// /**
- +// * rehash and grow
- +// */
- +// private def afterInsert(data: AtomicReferenceArray[Node]): Unit = {
- +// val newSize = approxSize.incrementAndGet()
- +// val origLength = data.length
- +// if (origLength < newSize) {
- +// this.data.synchronized {
- +// // if the value has changed already then it's not our problem
- +// if (this.data.compareAndSet(data, null)) {
- +// val size = origLength * 2
- +// val mask = origLength
- +// val newData = new AtomicReferenceArray[Node](size)
- +//
- +// var head0: Node = null
- +// var head1: Node = null
- +// var sourceIdx = 0
- +// while (sourceIdx < origLength) {
- +// head0 = null
- +// head1 = null
- +// var tail0: Node = null
- +// var tail1: Node = null
- +// var sourceNode = data.get(sourceIdx)
- +// while (sourceNode ne null) {
- +// if ((sourceNode.key.hashCode & mask) == 0) {
- +// if (head0 eq null) head0 = sourceNode
- +// else tail0.next = sourceNode
- +// tail0 = sourceNode
- +// } else {
- +// if (head1 eq null) head1 = sourceNode
- +// else tail1.next = sourceNode
- +// tail1 = sourceNode
- +// }
- +// sourceNode = sourceNode.next
- +// }
- +// if (tail0 ne null) tail0.next = null
- +// if (tail1 ne null) tail1.next = null
- +// newData.set(sourceIdx, head0)
- +// newData.set(sourceIdx + mask, head1)
- +// sourceIdx += 1
- +// }
- +// this.data.set(newData)
- +// }
- +// }
- +// }
- +// }
- +//
- +//
- +// private[this] val approxSize = new AtomicInteger
- +// private[this] final val data = new AtomicReference(new AtomicReferenceArray[Node](1024))
- +//
- +//}
- +
- +abstract class NodeInterner[T <: Node] {
- + final def size = approxSize.get
- + protected def createNode(key: String, node: T): T
- +
- + final def insertOrFind(key: String): T = {
- + val hash = key.hashCode
- + var oldTail: Node = null
- +
- + var node: Node = null
- + do {
- + //deliberately hiding this.data
- + val data = initial()
- + val bucket = hash & (data.length - 1)
- + val head = data(bucket)
- + node = head
- + while ((node ne null) &&
- + // we have already checked the tail
- + (node ne oldTail) &&
- + // it's not equal. hashCode is cheap for strings and a good discriminant
- + (node.key.hashCode() != hash || node.key != key))
- + node = node.next
- + if (node eq oldTail) node = null
- + if (node eq null) {
- + // minor optimisation - we can skip this tail if we have to retry
- + // if we came to the end of the chain of nodes, we don't need to search the same tail if we fail and try again
- + oldTail = head
- + val newNode = createNode(key, head.asInstanceOf[T])
- +
- + data.update(bucket, newNode)
- + afterInsert(data)
- + node = newNode
- + } else if (
- + // re-read to check that the table has not been grown by another thread (no volatile in this variant)
- + data ne this.data) {
- + node = null
- + }
- + } while (node eq null)
- + node.asInstanceOf[T]
- + }
- + final def contains(key: String): Boolean = {
- + getExistingImpl(key) ne null
- + }
- + final def get(key: String): Option[T] = {
- + Option(getExistingImpl(key).asInstanceOf[T])
- + }
- + final def getExistingImpl(key: String): Node = {
- + val data = initial
- + val list = data(key.hashCode & (data.length -1))
- + getExistingImpl(list,key)
- + }
- + @tailrec private def getExistingImpl(list: Node, key: String): Node = {
- + if (list eq null) null
- + else if (list.key == key) list
- + else getExistingImpl(list.next, key)
- + }
- +
- + /**
- + * get the root of the data
- + *
- + * @return the current bucket array
- + */
- + private def initial(): Array[Node] =
- + data
- +
- + /**
- + * rehash and grow
- + */
- + private def afterInsert(x: Array[Node]): Unit = {
- + val data = x
- + val newSize = approxSize.incrementAndGet()
- + val origLength = data.length
- + if (origLength < newSize) {
- + this.data.synchronized {
- + // if the table has changed already then it's not our problem; bail out
- + if (this.data ne data) return
- + val size = origLength * 2
- + val mask = origLength
- + val newData = new Array[Node](size)
- +
- + var head0: Node = null
- + var head1: Node = null
- + var sourceIdx = 0
- + while (sourceIdx < origLength) {
- + head0 = null
- + head1 = null
- + var tail0: Node = null
- + var tail1: Node = null
- + var sourceNode = data(sourceIdx)
- + while (sourceNode ne null) {
- + if ((sourceNode.key.hashCode & mask) == 0) {
- + if (head0 eq null) head0 = sourceNode
- + else tail0.next = sourceNode
- + tail0 = sourceNode
- + } else {
- + if (head1 eq null) head1 = sourceNode
- + else tail1.next = sourceNode
- + tail1 = sourceNode
- + }
- + sourceNode = sourceNode.next
- + }
- + if (tail0 ne null) tail0.next = null
- + if (tail1 ne null) tail1.next = null
- + newData.update(sourceIdx, head0)
- + newData.update(sourceIdx + mask, head1)
- + sourceIdx += 1
- + }
- + this.data = newData
- + }
- + }
- + }
- +
- +
- + private[this] val approxSize = new AtomicInteger
- + private[this] final var data = new Array[Node](1024)
- +
- +}
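Note: the Scala interner above is the plain-array baseline — insertOrFind walks the bucket chain, allocates via the createNode factory on a miss, and afterInsert doubles the table once the element count exceeds its length. A hedged usage sketch (DemoInterner/DemoNode are illustrative names; the real subclasses live in the benchmark and test files below):

    import scala.reflect.internal.util.cache.{Node, NodeInterner}

    class DemoNode(key: String, next: Node) extends Node(key, next)

    class DemoInterner extends NodeInterner[DemoNode] {
      // invoked to allocate the node for a key that is not yet interned;
      // `next` is the current head of the key's bucket chain
      override protected def createNode(key: String, next: DemoNode): DemoNode =
        new DemoNode(key, next)
    }

    val interner = new DemoInterner
    val a = interner.insertOrFind("foo")
    val b = interner.insertOrFind(new String("foo"))
    assert(a eq b)  // equal keys intern to the same node instance
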
- Index: test/benchmarks/src/main/scala/scala/reflect/internal/NameLookupBenchmark.scala
- IDEA additional info:
- Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
- <+>UTF-8
- ===================================================================
- --- test/benchmarks/src/main/scala/scala/reflect/internal/NameLookupBenchmark.scala (date 1555281893594)
- +++ test/benchmarks/src/main/scala/scala/reflect/internal/NameLookupBenchmark.scala (date 1555281893594)
- @@ -0,0 +1,128 @@
- +package scala.reflect.internal
- +import java.util.Collections
- +import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
- +
- +import org.openjdk.jmh.annotations._
- +import org.openjdk.jmh.infra.Blackhole
- +
- +import scala.reflect.internal.util.cache._
- +
- +//@Threads(4)
- +//@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput))
- +//class NameLookupBenchmark4Threads extends NameLookupBenchmark
- +//@Threads(2)
- +//@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput))
- +//class NameLookupBenchmark2Threads extends NameLookupBenchmark
- +
- +@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput))
- +@Fork(1)
- +@Threads(1)
- +@Warmup(iterations = 3)
- +@Measurement(iterations = 10)
- +@State(Scope.Benchmark)
- +class NameLookupBenchmark {
- + var map : ConcurrentHashMap[String, TestNode] = _
- + var cache : NodeInterner[TestNode]= _
- + var cacheJ : NodeJInterner[TestNodeJ]= _
- + var data : Array[String]= _
- + @Setup(Level.Trial) def setup(): Unit = {
- + map = new ConcurrentHashMap[String, TestNode]()
- + cache = new CacheImpl
- + cacheJ = new CacheImplJ
- + data = Array.tabulate[String](10000)(_.toString)
- + Collections.shuffle(java.util.Arrays.asList(data: _*))
- + }
- +
- + @Benchmark
- + def chmGet2(bh: Blackhole): Unit = chmGet(bh)
- + @Benchmark
- + def chmGet(bh: Blackhole): Unit = {
- + var i = 0
- + val d = data
- + while (i < d.length) {
- + val key = data(i)
- + var found = map.get(key)
- + if (found eq null) {
- + val x = new TestNode(key,null)
- + found = map.put(key,x)
- + if (found eq null) found = x
- + }
- + bh.consume(found)
- + i += 1
- + }
- + }
- +// @Benchmark
- +// def chmGetExisting(bh: Blackhole): Unit = {
- +// var i = 0
- +// val d = data
- +// while (i < d.length) {
- +// val key = data(i)
- +// var found = map.get(key)
- +// if (found eq null) {
- +// val x = new TestNode(key,null)
- +// found = map.put(key,x)
- +// if (found eq null) found = x
- +// }
- +// bh.consume(found)
- +// i += 1
- +// }
- +// }
- +
- +// @Benchmark
- +// def chmCompute(bh: Blackhole): Unit = {
- +// var i = 0
- +// val d = data
- +// while (i < d.length) {
- +// val key = data(i)
- +// val found = map.computeIfAbsent(key, new TestNode(_, null))
- +// bh.consume(found)
- +// i += 1
- +// }
- +// }
- +
- + @Benchmark
- + def lookup(bh: Blackhole): Unit = {
- + var i = 0
- + val d = data
- + while (i < d.length) {
- + val key = data(i)
- + val found = cache.insertOrFind(key)
- + bh.consume(found)
- + i += 1
- + }
- + }
- + @Benchmark
- + def lookupJ(bh: Blackhole): Unit = {
- + var i = 0
- + val d = data
- + while (i < d.length) {
- + val key = data(i)
- + val found = cacheJ.insertOrFind(key)
- + bh.consume(found)
- + i += 1
- + }
- + }
- +// @Benchmark
- +// def lookupExisting(bh: Blackhole): Unit = {
- +// var i = 0
- +// val d = data
- +// while (i < d.length) {
- +// val key = data(i)
- +// val found = cache.insertOrFind(key)
- +// bh.consume(found)
- +// i += 1
- +// }
- +// }
- +}
- +class CacheImpl extends NodeInterner[TestNode]{
- +
- + override def createNode(key: String, next: TestNode)= new TestNode(key,next){}
- +}
- +class TestNode(key: String, next: Node) extends Node(key,next)
- +
- +class CacheImplJ extends NodeJInterner[TestNodeJ]{
- +
- + override protected def createNodeJ(key: String, next: NodeJ) = new TestNodeJ(key,next){}
- +}
- +class TestNodeJ(key: String, next: NodeJ) extends NodeJ(key,next)
- +
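Note: assuming this file joins the existing JMH project under test/benchmarks in scala/scala, the benchmark should be runnable from sbt along the lines of bench/jmh:run scala.reflect.internal.NameLookupBenchmark (the exact project name and task may differ by checkout). chmGet measures the get-then-put ConcurrentHashMap baseline, while lookup and lookupJ measure the Scala and Java interners respectively.
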
- Index: src/reflect/scala/reflect/internal/util/cache/NodeJInterner.java
- IDEA additional info:
- Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
- <+>UTF-8
- ===================================================================
- --- src/reflect/scala/reflect/internal/util/cache/NodeJInterner.java (date 1555279948323)
- +++ src/reflect/scala/reflect/internal/util/cache/NodeJInterner.java (date 1555279948323)
- @@ -0,0 +1,136 @@
- +package scala.reflect.internal.util.cache;
- +
- +import java.util.concurrent.atomic.AtomicInteger;
- +import java.util.concurrent.atomic.AtomicReference;
- +import java.util.concurrent.atomic.AtomicReferenceArray;
- +
- +public abstract class NodeJInterner<T extends NodeJ> {
- +
- + public final int size() {
- + return approxSize.get();
- + }
- +
- + protected abstract T createNodeJ(String key, NodeJ node);
- +
- + public final T insertOrFind(String key) {
- + int hash = key.hashCode();
- + NodeJ oldTail = null;
- +
- + NodeJ node;
- + do {
- + //deliberately hiding this.data
- + AtomicReferenceArray<NodeJ> data = initial();
- + int bucket = hash & (data.length() - 1);
- + NodeJ head = data.get(bucket);
- + node = head;
- + while ((node != null) &&
- + // we have already checked the tail
- + (node != oldTail) &&
- + // it's not equal. hashCode is cheap for strings and a good discriminant
- + (node.key.hashCode() != hash || !node.key.equals(key)))
- + node = node.next;
- + if (node == oldTail) node = null;
- + if (node == null) {
- + // minor optimisation - we can skip this tail if we have to retry
- + // if we came to the end of the chain of nodes, we don't need to search the same tail if we fail and try again
- + oldTail = head;
- + T newNodeJ = createNodeJ(key, head);
- + if (data.compareAndSet(bucket, head, newNodeJ) &&
- + // volatile read to ensure that we have not grown in another thread
- + (data == this.data.get())) {
- + afterInsert(data);
- + node = newNodeJ;
- + }
- + } else if (
- + // volatile read to ensure that we have not grown in another thread
- + data != this.data.get()) {
- + node = null;
- + }
- + } while (node == null);
- + return (T) node;
- + }
- +//final def contains(key: String): Boolean = {
- +// getExistingImpl(key) != null
- +// }
- +//final def get(key: String): Option[T] = {
- +// Option(getExistingImpl(key).asInstanceOf[T])
- +// }
- +//final def getExistingImpl(key: String): NodeJ = {
- +// val data = initial
- +// val list = data.get(key.hashCode & (data.length() -1))
- +// getExistingImpl(list,key)
- +// }
- +//@tailrec private def getExistingImpl(list: NodeJ, key: String): NodeJ = {
- +// if (list == null) null
- +// else if (list.key == key) list
- +// else getExistingImpl(list.next, key)
- +// }
- +//
- +
- + /**
- + * get the root of data
- + *
- + * @return the current table; waits if a rehash is in progress
- + */
- + private AtomicReferenceArray<NodeJ> initial() {
- + //volatile read
- + AtomicReferenceArray<NodeJ> result = data.get();
- + //null indicates it is in the process of being rehashed
- + //updates are applied with synchronisation lock on data
- + if (result == null) synchronized (data) {
- + //when we have the lock we can guarantee that the other thread's rehash is complete
- + result = data.get();
- + assert result != null;
- + }
- + return result;
- + }
- +
- + /**
- + * rehash and grow
- + */
- + private void afterInsert(AtomicReferenceArray<NodeJ> data) {
- + int newSize = approxSize.incrementAndGet();
- + int origLength = data.length();
- + if (origLength < (newSize * 2)) {
- + synchronized (this.data) {
- + // if the value has changed already then it's not our problem
- + if (this.data.compareAndSet(data, null)) {
- + int size = origLength * 2;
- + int mask = origLength;
- + AtomicReferenceArray<NodeJ> newData = new AtomicReferenceArray<NodeJ>(size);
- +
- + NodeJ head0, head1;
- + int sourceIdx = 0;
- + while (sourceIdx < origLength) {
- + head0 = head1 = null;
- + NodeJ tail0 = null, tail1 = null;
- + NodeJ sourceNode = data.get(sourceIdx);
- + while (sourceNode != null) {
- + if ((sourceNode.key.hashCode() & mask) == 0) {
- + if (head0 == null) head0 = sourceNode;
- + else tail0.next = sourceNode;
- + tail0 = sourceNode;
- + } else {
- + if (head1 == null) head1 = sourceNode;
- + else tail1.next = sourceNode;
- + tail1 = sourceNode;
- + }
- + sourceNode = sourceNode.next;
- + }
- + if (tail0 != null) tail0.next = null;
- + if (tail1 != null) tail1.next = null;
- + newData.set(sourceIdx, head0);
- + newData.set(sourceIdx + mask, head1);
- + sourceIdx += 1;
- + }
- + this.data.set(newData);
- + }
- + }
- + }
- + }
- +
- +
- + private final AtomicInteger approxSize = new AtomicInteger();
- + private final AtomicReference<AtomicReferenceArray<NodeJ>> data = new AtomicReference<>(new AtomicReferenceArray<NodeJ>(1024));
- +
- +}
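Note: the Java variant is the lock-free one — insertOrFind publishes new nodes with compareAndSet, and afterInsert swaps the table reference to null during rehashing, so a concurrent reader that catches the gap waits in initial() until the grow completes. The grow itself relies on the standard power-of-two split: widening the mask by one bit either keeps a node at its old index or moves it up by exactly the old length. A quick sanity check of that identity (illustrative, not part of the patch):

    val oldLen = 1024                        // must be a power of two
    val hash = "foo".hashCode
    val oldBucket = hash & (oldLen - 1)
    val newBucket = hash & (2 * oldLen - 1)
    // the widened bucket is the old one, or the old one plus oldLen,
    // selected by the single bit (hash & oldLen)
    assert(newBucket == oldBucket || newBucket == oldBucket + oldLen)
    assert(((hash & oldLen) == 0) == (newBucket == oldBucket))
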
- Index: test/junit/scala/tools/nsc/util/NameLookupTest.scala
- IDEA additional info:
- Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
- <+>UTF-8
- ===================================================================
- --- test/junit/scala/tools/nsc/util/NameLookupTest.scala (date 1555277860512)
- +++ test/junit/scala/tools/nsc/util/NameLookupTest.scala (date 1555277860512)
- @@ -0,0 +1,103 @@
- +package scala.tools.nsc.util
- +
- +import java.util
- +import java.util.Collections
- +import java.util.concurrent.ConcurrentHashMap
- +
- +import org.junit._
- +import Assert._
- +
- +import scala.reflect.internal.util.cache.{Node, NodeInterner, NodeJ, NodeJInterner}
- +import scala.util.Random
- +
- +object foo extends App
- +class NameLookupTest {
- + val cache : NodeInterner[TestNode] = new CacheImpl
- + val cacheJ : NodeJInterner[TestNodeJ] = new CacheImplJ
- + val data : Array[String]= Array.tabulate[String](10000)(_.toString)
- + Collections.shuffle(java.util.Arrays.asList(data: _*))
- +
- + @Test def size(): Unit = {
- + assertEquals(0, cache.size)
- + val f1 = cache.insertOrFind("foo")
- + assertEquals(1, cache.size)
- + val f2 = cache.insertOrFind("foo")
- + assertSame(f1,f2)
- + assertEquals(1, cache.size)
- + val f3 = cache.insertOrFind(new String("foo"))
- + assertSame(f1,f3)
- + }
- +
- + @Test def insertAll(): Unit = {
- + import scala.collection.JavaConverters._
- + val all = new util.IdentityHashMap[TestNode, String]()
- + val random = new Random()
- + for (i <- 1 to data.length * 10) {
- + val r = random.nextInt(data.length)
- + val key = data(r)
- + val cached = cache.insertOrFind(key)
- + val existing = all.put(cached, key)
- + assertTrue((existing eq null) || existing == key)
- + }
- + for (i <- 0 until data.length) {
- + val key = data(i)
- + val existing = all.put(cache.insertOrFind(key), key)
- + assertTrue((existing eq null) || existing == key)
- + }
- + val bad = all.asScala groupBy {_._2} filter {_._2.size > 1}
- + assertEquals(0, bad.size)
- + assertEquals(data.length, cache.size)
- + }
- + @Test def sizeJ(): Unit = {
- + assertEquals(0, cacheJ.size)
- + val f1 = cacheJ.insertOrFind("foo")
- + assertEquals(1, cacheJ.size)
- + val f2 = cacheJ.insertOrFind("foo")
- + assertSame(f1,f2)
- + assertEquals(1, cacheJ.size)
- + val f3 = cacheJ.insertOrFind(new String("foo"))
- + assertSame(f1,f3)
- + }
- +
- + @Test def insertAllJ(): Unit = {
- + import scala.collection.JavaConverters._
- + val all = new util.IdentityHashMap[TestNodeJ, String]()
- + val random = new Random()
- + for (i <- 1 to data.length * 10) {
- + val r = random.nextInt(data.length)
- + val key = data(r)
- + val cacheJd = cacheJ.insertOrFind(key)
- + val existing = all.put(cacheJd, key)
- + assertTrue((existing eq null) || existing == key)
- + }
- + for (i <- 0 until data.length) {
- + val key = data(i)
- + val existing = all.put(cacheJ.insertOrFind(key), key)
- + assertTrue((existing eq null) || existing == key)
- + }
- + val bad = all.asScala groupBy {_._2} filter {_._2.size > 1}
- + assertEquals(0, bad.size)
- + assertEquals(data.length, cacheJ.size)
- + }
- +
- +}
- +
- +
- +class CacheImpl extends NodeInterner[TestNode]{
- + val created = new util.HashSet[String]()
- + override def createNode(key: String, next: TestNode): TestNode = {
- + assert (created.add(key), key)
- + new TestNode(key,next){}
- + }
- +}
- +class TestNode(key: String, next: Node) extends Node(key,next)
- +
- +class CacheImplJ extends NodeJInterner[TestNodeJ]{
- + val created = new util.HashSet[String]()
- +
- + override protected def createNodeJ(key: String, node: NodeJ): TestNodeJ = {
- + assert (created.add(key), key)
- + new TestNodeJ(key,node){}
- + }
- +}
- +class TestNodeJ(key: String, next: NodeJ) extends NodeJ(key,next)
- \ No newline at end of file
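Note: the JUnit coverage above is single-threaded. Since the Java interner's point is concurrent CAS insertion, a multi-threaded smoke test would be a natural complement; a hedged sketch with its own minimal subclass follows (names illustrative, not part of the patch; CacheImplJ's created-once assertion would not be safe under contention, since a failed CAS legitimately re-invokes createNodeJ):

    import scala.reflect.internal.util.cache.{NodeJ, NodeJInterner}

    class SmokeNode(key: String, next: NodeJ) extends NodeJ(key, next)
    class SmokeInterner extends NodeJInterner[SmokeNode] {
      override protected def createNodeJ(key: String, next: NodeJ) =
        new SmokeNode(key, next)
    }

    val interner = new SmokeInterner
    val keys = Array.tabulate(10000)(_.toString)
    val perThread = Array.fill(4)(new Array[SmokeNode](keys.length))
    val threads = (0 until 4).map { t =>
      new Thread(() => {
        var i = 0
        while (i < keys.length) {
          perThread(t)(i) = interner.insertOrFind(keys(i))
          i += 1
        }
      })
    }
    threads.foreach(_.start())
    threads.foreach(_.join())
    // every thread must have observed the identical node instance per key
    for (t <- 1 until 4; i <- 0 until keys.length)
      assert(perThread(t)(i) eq perThread(0)(i))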