Advertisement
Guest User

Untitled

a guest
Apr 25th, 2019
89
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 22.81 KB | None | 0 0
  1. Index: src/reflect/scala/reflect/internal/util/cache/NodeJ.java
  2. IDEA additional info:
  3. Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
  4. <+>UTF-8
  5. ===================================================================
  6. --- src/reflect/scala/reflect/internal/util/cache/NodeJ.java (date 1555277045250)
  7. +++ src/reflect/scala/reflect/internal/util/cache/NodeJ.java (date 1555277045250)
  8. @@ -0,0 +1,12 @@
  9. +package scala.reflect.internal.util.cache;
  10. +
  11. +public abstract class NodeJ { // intrusive hash-chain node: an interned key plus the link to the next node in its bucket
  12. + final String key; // the interned string; immutable after construction
  13. + volatile NodeJ next; // volatile: readers traverse the chain while a rehash relinks it
  14. +
  15. + protected NodeJ(String key, NodeJ next) {
  16. + this.key = key;
  17. + this.next = next;
  18. + }
  19. +}
  20. +
  21. Index: src/reflect/scala/reflect/internal/util/cache/NodeInterner.scala
  22. IDEA additional info:
  23. Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
  24. <+>UTF-8
  25. ===================================================================
  26. --- src/reflect/scala/reflect/internal/util/cache/NodeInterner.scala (date 1555283690107)
  27. +++ src/reflect/scala/reflect/internal/util/cache/NodeInterner.scala (date 1555283690107)
  28. @@ -0,0 +1,254 @@
  29. +package scala.reflect.internal.util.cache
  30. +
  31. +import java.util.concurrent.atomic.{AtomicInteger, AtomicLong, AtomicReferenceArray}
  32. +import java.util.concurrent.atomic.AtomicReference
  33. +
  34. +import scala.annotation.tailrec
  35. +
  36. +abstract class Node(val key: String, @volatile private[cache] var next: Node) extends AnyRef // Scala counterpart of NodeJ: interned key plus a volatile link to the next node in its hash bucket
  37. +
  38. +//abstract class NodeInterner[T <: Node] {
  39. +// final def size = approxSize.get
  40. +// protected def createNode(key: String, node: T): T
  41. +//
  42. +// final def insertOrFind(key: String): T = {
  43. +// val hash = key.hashCode
  44. +// var oldTail: Node = null
  45. +//
  46. +// var node: Node = null
  47. +// do {
  48. +// //deliberately hiding this.data
  49. +// val data = initial()
  50. +// val bucket = hash & (data.length - 1)
  51. +// val head = data.get(bucket)
  52. +// node = head
  53. +// while ((node ne null) &&
  54. +// // we have already checked the tail
  55. +// (node ne oldTail) &&
  56. +// // its not equal. HashCode is cheap for strings and a good discriminant
  57. +// (node.key.hashCode() != hash || node.key != key))
  58. +// node = node.next
  59. +// if (node eq oldTail) node = null
  60. +// if (node eq null) {
  61. +// // minor optimisation - we can skip this tail if we have to retry
  62. +// // if we came to the end of the chain of nodes we dont need to search the same tail if we fail and try again
  63. +// oldTail = head
  64. +// val newNode = createNode(key, head.asInstanceOf[T])
  65. +// if (data.compareAndSet(bucket, head, newNode) &&
  66. +// // volatile read to ensure that we have not grown in another thread
  67. +// (data eq this.data.get)) {
  68. +// afterInsert(data)
  69. +// node = newNode
  70. +// }
  71. +// } else if (
  72. +// // volatile read to ensure that we have not grown in another thread
  73. +// data ne this.data.get) {
  74. +// node = null
  75. +// }
  76. +// } while (node eq null)
  77. +// node.asInstanceOf[T]
  78. +// }
  79. +// final def contains(key: String): Boolean = {
  80. +// getExistingImpl(key) ne null
  81. +// }
  82. +// final def get(key: String): Option[T] = {
  83. +// Option(getExistingImpl(key).asInstanceOf[T])
  84. +// }
  85. +// final def getExistingImpl(key: String): Node = {
  86. +// val data = initial
  87. +// val list = data.get(key.hashCode & (data.length() -1))
  88. +// getExistingImpl(list,key)
  89. +// }
  90. +// @tailrec private def getExistingImpl(list: Node, key: String): Node = {
  91. +// if (list eq null) null
  92. +// else if (list.key == key) list
  93. +// else getExistingImpl(list.next, key)
  94. +// }
  95. +//
  96. +// /**
  97. +// * get the root of data
  98. +// *
  99. +// * @return
  100. +// */
  101. +// private def initial(): AtomicReferenceArray[Node] = {
  102. +// //volatile read
  103. +// var result = data.get()
  104. +// //null indicates it is in the process of being rehashed
  105. +// //updates are applied with synchronisation lock on data
  106. +// if (result eq null) data.synchronized {
  107. +// //when we have the lock we can guarantee that the other threads rehash is complete
  108. +// result = data.get()
  109. +// assert(result ne null)
  110. +// }
  111. +// result
  112. +// }
  113. +//
  114. +// /**
  115. +// * rehash and grow
  116. +// */
  117. +// private def afterInsert(data: AtomicReferenceArray[Node]): Unit = {
  118. +// val newSize = approxSize.incrementAndGet()
  119. +// val origLength = data.length
  120. +// if (origLength < newSize) {
  121. +// this.data.synchronized {
  122. +// // if the value has changed already then its not our problem
  123. +// if (this.data.compareAndSet(data, null)) {
  124. +// val size = origLength * 2
  125. +// val mask = origLength
  126. +// val newData = new AtomicReferenceArray[Node](size)
  127. +//
  128. +// var head0: Node = null
  129. +// var head1: Node = null
  130. +// var sourceIdx = 0
  131. +// while (sourceIdx < origLength) {
  132. +// head0 = null
  133. +// head1 = null
  134. +// var tail0: Node = null
  135. +// var tail1: Node = null
  136. +// var sourceNode = data.get(sourceIdx)
  137. +// while (sourceNode ne null) {
  138. +// if ((sourceNode.key.hashCode & mask) == 0) {
  139. +// if (head0 eq null) head0 = sourceNode
  140. +// else tail0.next = sourceNode
  141. +// tail0 = sourceNode
  142. +// } else {
  143. +// if (head1 eq null) head1 = sourceNode
  144. +// else tail1.next = sourceNode
  145. +// tail1 = sourceNode
  146. +// }
  147. +// sourceNode = sourceNode.next
  148. +// }
  149. +// if (tail0 ne null) tail0.next = null
  150. +// if (tail1 ne null) tail1.next = null
  151. +// newData.set(sourceIdx, head0)
  152. +// newData.set(sourceIdx + mask, head1)
  153. +// sourceIdx += 1
  154. +// }
  155. +// this.data.set(newData)
  156. +// }
  157. +// }
  158. +// }
  159. +// }
  160. +//
  161. +//
  162. +// private[this] val approxSize = new AtomicInteger
  163. +// private[this] final val data = new AtomicReference(new AtomicReferenceArray[Node](1024))
  164. +//
  165. +//}
  166. +
  167. +abstract class NodeInterner[T <: Node] { // chained-bucket string interner; NOTE(review): uses a plain var table and non-atomic writes, unlike NodeJInterner - confirm this variant is intended for single-threaded use only
  168. + final def size = approxSize.get // count of interned nodes (incremented once per insert)
  169. + protected def createNode(key: String, node: T): T // factory: build the node for `key` with `node` as its chain successor
  170. +
  171. + final def insertOrFind(key: String): T = { // return the canonical node for `key`, creating it if absent
  172. + val hash = key.hashCode
  173. + var oldTail: Node = null
  174. +
  175. + var node: Node = null
  176. + do {
  177. + //deliberately hiding this.data
  178. + val data = initial()
  179. + val bucket = hash & (data.length - 1) // table length is a power of two
  180. + val head = data(bucket)
  181. + node = head
  182. + while ((node ne null) &&
  183. + // we have already checked the tail
  184. + (node ne oldTail) &&
  185. + // its not equal. HashCode is cheap for strings and a good discriminant
  186. + (node.key.hashCode() != hash || node.key != key))
  187. + node = node.next
  188. + if (node eq oldTail) node = null
  189. + if (node eq null) {
  190. + // minor optimisation - we can skip this tail if we have to retry
  191. + // if we came to the end of the chain of nodes we dont need to search the same tail if we fail and try again
  192. + oldTail = head
  193. + val newNode = createNode(key, head.asInstanceOf[T])
  194. +
  195. + data.update(bucket, newNode) // NOTE(review): plain write, no CAS - concurrent inserts can lose updates; see class note
  196. + afterInsert(data)
  197. + node = newNode
  198. + } else if (
  199. + // volatile read to ensure that we have not grown in another thread
  200. + data ne this.data) {
  201. + node = null
  202. + }
  203. + } while (node eq null)
  204. + node.asInstanceOf[T]
  205. + }
  206. + final def contains(key: String): Boolean = {
  207. + getExistingImpl(key) ne null
  208. + }
  209. + final def get(key: String): Option[T] = {
  210. + Option(getExistingImpl(key).asInstanceOf[T])
  211. + }
  212. + final def getExistingImpl(key: String): Node = { // lookup without inserting; returns null when absent
  213. + val data = initial()
  214. + val list = data(key.hashCode & (data.length - 1))
  215. + getExistingImpl(list,key)
  216. + }
  217. + @tailrec private def getExistingImpl(list: Node, key: String): Node = {
  218. + if (list eq null) null
  219. + else if (list.key == key) list
  220. + else getExistingImpl(list.next, key)
  221. + }
  222. +
  223. + /**
  224. + * get the root of data
  225. + *
  226. + * @return the current bucket table
  227. + */
  228. + private def initial(): Array[Node] = {
  229. + data }
  230. +
  231. + /**
  232. + * rehash and grow
  233. + */
  234. + private def afterInsert(x: Array[Node]): Unit = {
  235. + val data = x // fix: keep a usable reference - the old code assigned null here and then dereferenced it below (NPE on the first grow)
  236. + val newSize = approxSize.incrementAndGet()
  237. + val origLength = data.length
  238. + if (origLength < newSize) {
  239. + this.data.synchronized {
  240. + // if the value has changed already then its not our problem
  241. + if (this.data eq x) { // fix: restore the staleness guard the AtomicReference version expressed via compareAndSet(data, null)
  242. + val size = origLength * 2
  243. + val mask = origLength
  244. + val newData = new Array[Node](size)
  245. +
  246. + var head0: Node = null
  247. + var head1: Node = null
  248. + var sourceIdx = 0
  249. + while (sourceIdx < origLength) {
  250. + head0 = null
  251. + head1 = null
  252. + var tail0: Node = null
  253. + var tail1: Node = null
  254. + var sourceNode = data(sourceIdx)
  255. + while (sourceNode ne null) {
  256. + if ((sourceNode.key.hashCode & mask) == 0) { // the `mask` bit routes each node to sourceIdx or sourceIdx + mask
  257. + if (head0 eq null) head0 = sourceNode
  258. + else tail0.next = sourceNode
  259. + tail0 = sourceNode
  260. + } else {
  261. + if (head1 eq null) head1 = sourceNode
  262. + else tail1.next = sourceNode
  263. + tail1 = sourceNode
  264. + }
  265. + sourceNode = sourceNode.next
  266. + }
  267. + if (tail0 ne null) tail0.next = null
  268. + if (tail1 ne null) tail1.next = null
  269. + newData.update(sourceIdx, head0)
  270. + newData.update(sourceIdx + mask, head1)
  271. + sourceIdx += 1
  272. + }
  273. + this.data = newData }
  274. + } // NOTE(review): synchronising on the array means the monitor changes after each grow - consider a dedicated lock object
  275. + }
  276. + }
  277. +
  278. +
  279. + private[this] val approxSize = new AtomicInteger
  280. + private[this] final var data = new Array[Node](1024)
  281. +
  282. +}
  283. Index: test/benchmarks/src/main/scala/scala/reflect/internal/NameLookupBenchmark.scala
  284. IDEA additional info:
  285. Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
  286. <+>UTF-8
  287. ===================================================================
  288. --- test/benchmarks/src/main/scala/scala/reflect/internal/NameLookupBenchmark.scala (date 1555281893594)
  289. +++ test/benchmarks/src/main/scala/scala/reflect/internal/NameLookupBenchmark.scala (date 1555281893594)
  290. @@ -0,0 +1,128 @@
  291. +package scala.reflect.internal
  292. +import java.util.Collections
  293. +import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
  294. +
  295. +import org.openjdk.jmh.annotations._
  296. +import org.openjdk.jmh.infra.Blackhole
  297. +
  298. +import scala.reflect.internal.util.cache._
  299. +
  300. +//@Threads(4)
  301. +//@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput))
  302. +//class NameLookupBenchmark4Threads extends NameLookupBenchmark
  303. +//@Threads(2)
  304. +//@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput))
  305. +//class NameLookupBenchmark2Threads extends NameLookupBenchmark
  306. +
  307. +@BenchmarkMode(Array(org.openjdk.jmh.annotations.Mode.Throughput))
  308. +@Fork(1)
  309. +@Threads(1)
  310. +@Warmup(iterations = 3)
  311. +@Measurement(iterations = 10)
  312. +@State(Scope.Benchmark)
  313. +class NameLookupBenchmark { // JMH state: compares ConcurrentHashMap against the Scala and Java interner prototypes
  314. + var map : ConcurrentHashMap[String, TestNode] = _
  315. + var cache : NodeInterner[TestNode]= _
  316. + var cacheJ : NodeJInterner[TestNodeJ]= _
  317. + var data : Array[String]= _ // 10000 distinct numeric-string keys, shuffled once per trial
  318. + @Setup(Level.Trial) def setup(): Unit = {
  319. + map = new ConcurrentHashMap[String, TestNode]()
  320. + cache = new CacheImpl
  321. + cacheJ = new CacheImplJ
  322. + data = Array.tabulate[String](10000)(_.toString)
  323. + Collections.shuffle(java.util.Arrays.asList(data)) // NOTE(review): confirm asList over a Scala Array wraps the elements rather than producing a one-element list of the array - otherwise this shuffle is a no-op
  324. + }
  325. +
  326. + @Benchmark
  327. + def chmGet2(bh: Blackhole): Unit = chmGet(bh) // duplicate measurement of chmGet (sanity check on JIT profile stability)
  328. + @Benchmark
  329. + def chmGet(bh: Blackhole): Unit = { // baseline: get-then-put into a ConcurrentHashMap
  330. + var i = 0
  331. + val d = data
  332. + while (i < d.length) {
  333. + val key = data(i) // NOTE(review): reads `data` rather than the local `d` captured above - harmless but inconsistent
  334. + var found = map.get(key)
  335. + if (found eq null) {
  336. + val x = new TestNode(key,null)
  337. + found = map.put(key,x) // NOTE(review): put (not putIfAbsent) - under racing threads two callers can observe different nodes for the same key
  338. + if (found eq null) found = x
  339. + }
  340. + bh.consume(found)
  341. + i += 1
  342. + }
  343. + }
  344. +// @Benchmark
  345. +// def chmGetExisting(bh: Blackhole): Unit = {
  346. +// var i = 0
  347. +// val d = data
  348. +// while (i < d.length) {
  349. +// val key = data(i)
  350. +// var found = map.get(key)
  351. +// if (found eq null) {
  352. +// val x = new TestNode(key,null)
  353. +// found = map.put(key,x)
  354. +// if (found eq null) found = x
  355. +// }
  356. +// bh.consume(found)
  357. +// i += 1
  358. +// }
  359. +// }
  360. +
  361. +// @Benchmark
  362. +// def chmCompute(bh: Blackhole): Unit = {
  363. +// var i = 0
  364. +// val d = data
  365. +// while (i < d.length) {
  366. +// val key = data(i)
  367. +// val found = map.computeIfAbsent(key, new TestNode(_, null))
  368. +// bh.consume(found)
  369. +// i += 1
  370. +// }
  371. +// }
  372. +
  373. + @Benchmark
  374. + def lookup(bh: Blackhole): Unit = { // exercise the Scala NodeInterner over every key
  375. + var i = 0
  376. + val d = data
  377. + while (i < d.length) {
  378. + val key = data(i)
  379. + val found = cache.insertOrFind(key)
  380. + bh.consume(found)
  381. + i += 1
  382. + }
  383. + }
  384. + @Benchmark
  385. + def lookupJ(bh: Blackhole): Unit = { // exercise the Java NodeJInterner over every key
  386. + var i = 0
  387. + val d = data
  388. + while (i < d.length) {
  389. + val key = data(i)
  390. + val found = cacheJ.insertOrFind(key)
  391. + bh.consume(found)
  392. + i += 1
  393. + }
  394. + }
  395. +// @Benchmark
  396. +// def lookupExisting(bh: Blackhole): Unit = {
  397. +// var i = 0
  398. +// val d = data
  399. +// while (i < d.length) {
  400. +// val key = data(i)
  401. +// val found = cache.insertOrFind(key)
  402. +// bh.consume(found)
  403. +// i += 1
  404. +// }
  405. +// }
  406. +}
  407. +class CacheImpl extends NodeInterner[TestNode]{ // benchmark binding of the Scala interner
  408. +
  409. + override def createNode(key: String, next: TestNode)= new TestNode(key,next){} // `{}` creates an anonymous subclass of TestNode
  410. +}
  411. +class TestNode(key: String, next: Node) extends Node(key,next)
  412. +
  413. +class CacheImplJ extends NodeJInterner[TestNodeJ]{ // benchmark binding of the Java interner
  414. +
  415. + override protected def createNodeJ(key: String, next: NodeJ) = new TestNodeJ(key,next){}
  416. +}
  417. +class TestNodeJ(key: String, next: NodeJ) extends NodeJ(key,next)
  418. +
  419. Index: src/reflect/scala/reflect/internal/util/cache/NodeJInterner.java
  420. IDEA additional info:
  421. Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
  422. <+>UTF-8
  423. ===================================================================
  424. --- src/reflect/scala/reflect/internal/util/cache/NodeJInterner.java (date 1555279948323)
  425. +++ src/reflect/scala/reflect/internal/util/cache/NodeJInterner.java (date 1555279948323)
  426. @@ -0,0 +1,136 @@
  427. +package scala.reflect.internal.util.cache;
  428. +
  429. +import java.util.concurrent.atomic.AtomicInteger;
  430. +import java.util.concurrent.atomic.AtomicReference;
  431. +import java.util.concurrent.atomic.AtomicReferenceArray;
  432. +
  433. +public abstract class NodeJInterner<T extends NodeJ> { // lock-free interner: CAS inserts into an AtomicReferenceArray; rehash runs under a lock, marked by a null table
  434. +
  435. + public final int size() {
  436. + return approxSize.get(); // incremented once per successful insert
  437. + }
  438. +
  439. + protected abstract T createNodeJ(String key, NodeJ node); // factory: build the node for key with `node` as its chain successor
  440. +
  441. + public final T insertOrFind(String key) { // return the canonical node for key, inserting it if absent
  442. + int hash = key.hashCode();
  443. + NodeJ oldTail = null;
  444. +
  445. + NodeJ node;
  446. + do {
  447. + //deliberately hiding this.data
  448. + AtomicReferenceArray<NodeJ> data = initial();
  449. + int bucket = hash & (data.length() - 1); // table length is a power of two
  450. + NodeJ head = data.get(bucket);
  451. + node = head;
  452. + while ((node != null) &&
  453. + // we have already checked the tail
  454. + (node != oldTail) &&
  455. + // its not equal. HashCode is cheap for strings and a good discriminant
  456. + (node.key.hashCode() != hash || !node.key.equals(key)))
  457. + node = node.next;
  458. + if (node == oldTail) node = null;
  459. + if (node == null) {
  460. + // minor optimisation - we can skip this tail if we have to retry
  461. + // if we came to the end of the chain of nodes we dont need to search the same tail if we fail and try again
  462. + oldTail = head;
  463. + NodeJ newNodeJ = createNodeJ(key, head); // fix: dropped redundant unchecked (T) cast - createNodeJ is declared to return T
  464. + if (data.compareAndSet(bucket, head, newNodeJ) &&
  465. + // volatile read to ensure that we have not grown in another thread
  466. + (data == this.data.get())) {
  467. + afterInsert(data);
  468. + node = newNodeJ;
  469. + }
  470. + } else if (
  471. + // volatile read to ensure that we have not grown in another thread
  472. + data != this.data.get()) {
  473. + node = null;
  474. + }
  475. + } while (node == null);
  476. + return (T) node; // unchecked but safe: every node in the table was produced by createNodeJ
  477. + }
  478. +//final def contains(key: String): Boolean = {
  479. +// getExistingImpl(key) != null
  480. +// }
  481. +//final def get(key: String): Option[T] = {
  482. +// Option(getExistingImpl(key).asInstanceOf[T])
  483. +// }
  484. +//final def getExistingImpl(key: String): NodeJ = {
  485. +// val data = initial
  486. +// val list = data.get(key.hashCode & (data.length() -1))
  487. +// getExistingImpl(list,key)
  488. +// }
  489. +//@tailrec private def getExistingImpl(list: NodeJ, key: String): NodeJ = {
  490. +// if (list == null) null
  491. +// else if (list.key == key) list
  492. +// else getExistingImpl(list.next, key)
  493. +// }
  494. +//
  495. +
  496. + /**
  497. + * get the root of data
  498. + *
  499. + * @return the current table, waiting out any in-progress rehash
  500. + */
  501. + private AtomicReferenceArray<NodeJ> initial() {
  502. + //volatile read
  503. + AtomicReferenceArray<NodeJ> result = data.get();
  504. + //null indicates it is in the process of being rehashed
  505. + //updates are applied with synchronisation lock on data
  506. + if (result == null) synchronized (data) {
  507. + //when we have the lock we can guarantee that the other threads rehash is complete
  508. + result = data.get();
  509. + assert result != null;
  510. + }
  511. + return result;
  512. + }
  513. +
  514. + /**
  515. + * rehash and grow the table, doubling its size
  516. + */
  517. + private void afterInsert(AtomicReferenceArray<NodeJ> data) {
  518. + int newSize = approxSize.incrementAndGet();
  519. + int origLength = data.length();
  520. + if (origLength < (newSize * 2)) { // grow when count exceeds half the capacity; NOTE(review): the Scala variant grows at full capacity - confirm which threshold is intended
  521. + synchronized (this.data) { // this.data is a final AtomicReference, so the monitor is stable across grows
  522. + // if the value has changed already then its not our problem
  523. + if (this.data.compareAndSet(data, null)) { // null marks "rehash in progress" for initial()
  524. + int size = origLength * 2;
  525. + int mask = origLength;
  526. + AtomicReferenceArray<NodeJ> newData = new AtomicReferenceArray<NodeJ>(size);
  527. +
  528. + NodeJ head0, head1;
  529. + int sourceIdx = 0;
  530. + while (sourceIdx < origLength) {
  531. + head0 = head1 = null;
  532. + NodeJ tail0 = null, tail1 = null;
  533. + NodeJ sourceNode = data.get(sourceIdx);
  534. + while (sourceNode != null) {
  535. + if ((sourceNode.key.hashCode() & mask) == 0) { // the `mask` bit routes each node to sourceIdx or sourceIdx + mask
  536. + if (head0 == null) head0 = sourceNode;
  537. + else tail0.next = sourceNode;
  538. + tail0 = sourceNode;
  539. + } else {
  540. + if (head1 == null) head1 = sourceNode;
  541. + else tail1.next = sourceNode;
  542. + tail1 = sourceNode;
  543. + }
  544. + sourceNode = sourceNode.next;
  545. + }
  546. + if (tail0 != null) tail0.next = null;
  547. + if (tail1 != null) tail1.next = null;
  548. + newData.set(sourceIdx, head0);
  549. + newData.set(sourceIdx + mask, head1);
  550. + sourceIdx += 1;
  551. + }
  552. + this.data.set(newData); // publish the doubled table; readers blocked in initial() resume
  553. + }
  554. + }
  555. + }
  556. + }
  557. +
  558. +
  559. + private final AtomicInteger approxSize = new AtomicInteger();
  560. + private final AtomicReference<AtomicReferenceArray<NodeJ>> data = new AtomicReference<>(new AtomicReferenceArray<NodeJ>(1024));
  561. +
  562. +}
  563. Index: test/junit/scala/tools/nsc/util/NameLookupTest.scala
  564. IDEA additional info:
  565. Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
  566. <+>UTF-8
  567. ===================================================================
  568. --- test/junit/scala/tools/nsc/util/NameLookupTest.scala (date 1555277860512)
  569. +++ test/junit/scala/tools/nsc/util/NameLookupTest.scala (date 1555277860512)
  570. @@ -0,0 +1,103 @@
  571. +package scala.tools.nsc.util
  572. +
  573. +import java.util
  574. +import java.util.Collections
  575. +import java.util.concurrent.ConcurrentHashMap
  576. +
  577. +import org.junit._
  578. +import Assert._
  579. +
  580. +import scala.reflect.internal.util.cache.{Node, NodeInterner, NodeJ, NodeJInterner}
  581. +import scala.util.Random
  582. +
  583. +object foo extends App // NOTE(review): leftover scratch object - appears unused, consider removing
  584. +class NameLookupTest {
  585. + val cache : NodeInterner[TestNode] = new CacheImpl // Scala interner under test
  586. + val cacheJ : NodeJInterner[TestNodeJ] = new CacheImplJ // Java interner under test
  587. + val data : Array[String]= Array.tabulate[String](10000)(_.toString) // 10000 distinct keys
  588. + Collections.shuffle(java.util.Arrays.asList(data)) // NOTE(review): confirm this shuffles the elements, not a one-element list holding the array
  589. +
  590. + @Test def size(): Unit = { // interning the same key (even a distinct String instance) returns the same node and does not grow size
  591. + assertEquals(0, cache.size)
  592. + val f1 = cache.insertOrFind("foo")
  593. + assertEquals(1, cache.size)
  594. + val f2 = cache.insertOrFind("foo")
  595. + assertSame(f1,f2)
  596. + assertEquals(1, cache.size)
  597. + val f3 = cache.insertOrFind(new String("foo")) // equal but not reference-identical key
  598. + assertSame(f1,f3)
  599. + }
  600. +
  601. + @Test def insertAll(): Unit = { // random and exhaustive inserts must never yield two distinct nodes for one key
  602. + import scala.collection.JavaConverters._
  603. + val all = new util.IdentityHashMap[TestNode, String]() // identity map: detects duplicate node instances per key
  604. + val random = new Random()
  605. + for (i <- 1 to data.length * 10) {
  606. + val r = random.nextInt(data.length)
  607. + val key = data(r)
  608. + val cached = cache.insertOrFind(key)
  609. + val existing = all.put(cached, key)
  610. + assertTrue((existing eq null) || existing == key)
  611. + }
  612. + for (i <- 0 until data.length) {
  613. + val key = data(i)
  614. + val existing = all.put(cache.insertOrFind(key), key)
  615. + assertTrue((existing eq null) || existing == key)
  616. + }
  617. + val bad = all.asScala groupBy {_._2} filter {_._2.size > 1} // keys that ended up with more than one node instance - must be none
  618. + assertEquals(0, bad.size)
  619. + assertEquals(data.length, cache.size)
  620. + }
  621. + @Test def sizeJ(): Unit = { // same contract as size(), against the Java interner
  622. + assertEquals(0, cacheJ.size)
  623. + val f1 = cacheJ.insertOrFind("foo")
  624. + assertEquals(1, cacheJ.size)
  625. + val f2 = cacheJ.insertOrFind("foo")
  626. + assertSame(f1,f2)
  627. + assertEquals(1, cacheJ.size)
  628. + val f3 = cacheJ.insertOrFind(new String("foo"))
  629. + assertSame(f1,f3)
  630. + }
  631. +
  632. + @Test def insertAllJ(): Unit = { // same contract as insertAll(), against the Java interner
  633. + import scala.collection.JavaConverters._
  634. + val all = new util.IdentityHashMap[TestNodeJ, String]()
  635. + val random = new Random()
  636. + for (i <- 1 to data.length * 10) {
  637. + val r = random.nextInt(data.length)
  638. + val key = data(r)
  639. + val cacheJd = cacheJ.insertOrFind(key)
  640. + val existing = all.put(cacheJd, key)
  641. + assertTrue((existing eq null) || existing == key)
  642. + }
  643. + for (i <- 0 until data.length) {
  644. + val key = data(i)
  645. + val existing = all.put(cacheJ.insertOrFind(key), key)
  646. + assertTrue((existing eq null) || existing == key)
  647. + }
  648. + val bad = all.asScala groupBy {_._2} filter {_._2.size > 1}
  649. + assertEquals(0, bad.size)
  650. + assertEquals(data.length, cacheJ.size)
  651. + }
  652. +
  653. +}
  654. +
  655. +
  656. +class CacheImpl extends NodeInterner[TestNode]{ // test binding: additionally asserts the factory runs at most once per key
  657. + val created = new util.HashSet[String]() // keys already seen by the factory; NOTE(review): unsynchronised - single-threaded tests only
  658. + override def createNode(key: String, next: TestNode): TestNode = {
  659. + assert (created.add(key), key) // NOTE(review): a failed-CAS retry in a concurrent interner would legitimately call the factory twice for one key
  660. + new TestNode(key,next){}
  661. + }
  662. +}
  663. +class TestNode(key: String, next: Node) extends Node(key,next)
  664. +
  665. +class CacheImplJ extends NodeJInterner[TestNodeJ]{ // test binding for the Java interner, same once-per-key assertion
  666. + val created = new util.HashSet[String]()
  667. +
  668. + override protected def createNodeJ(key: String, node: NodeJ): TestNodeJ = {
  669. + assert (created.add(key), key) // same once-per-key expectation as CacheImpl above
  670. + new TestNodeJ(key,node){}
  671. + }
  672. +}
  673. +class TestNodeJ(key: String, next: NodeJ) extends NodeJ(key,next)
  674. \ No newline at end of file
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement