diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..0a5b969 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,26 @@ +name: test +on: + push: + branches: + - main + pull_request: +jobs: + test: + strategy: + fail-fast: false + matrix: + java: [8, 11, 17, 21] + scala: [2.11.x, 2.12.x, 2.13.x, 3.x] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: coursier/cache-action@v6 + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{matrix.java}} + - uses: sbt/setup-sbt@v1 + - name: Test + run: sbt ++${{matrix.scala}} test headerCheck versionPolicyCheck publishLocal diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml new file mode 100644 index 0000000..3549ded --- /dev/null +++ b/.github/workflows/cla.yml @@ -0,0 +1,11 @@ +name: "Check Scala CLA" +on: + pull_request: +jobs: + cla-check: + runs-on: ubuntu-latest + steps: + - name: Verify CLA + uses: scala/cla-checker@v1 + with: + author: ${{ github.event.pull_request.user.login }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..1e5360b --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,22 @@ +name: Release +on: + push: + tags: ["*"] +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: 8 + - uses: sbt/setup-sbt@v1 + - run: sbt versionCheck ci-release + env: + PGP_PASSPHRASE: ${{secrets.PGP_PASSPHRASE}} + PGP_SECRET: ${{secrets.PGP_SECRET}} + SONATYPE_PASSWORD: ${{secrets.SONATYPE_PASSWORD}} + SONATYPE_USERNAME: ${{secrets.SONATYPE_USERNAME}} diff --git a/.gitignore b/.gitignore index 86fb982..41ed2bf 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ target /.settings /.target /bin +benchmark/JmhBench.scala \ No newline at end of file diff --git a/.jvmopts b/.jvmopts new 
file mode 100644 index 0000000..56aefe0 --- /dev/null +++ b/.jvmopts @@ -0,0 +1 @@ +-Xmx4G diff --git a/.scala-steward.conf b/.scala-steward.conf new file mode 100644 index 0000000..4b6a192 --- /dev/null +++ b/.scala-steward.conf @@ -0,0 +1,4 @@ +updates.ignore = [ + # see https://github.com/scala/scala-java8-compat/pull/306 + { groupId = "org.apache.commons", artifactId = "commons-lang3" } +] diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index a5a7a74..0000000 --- a/.travis.yml +++ /dev/null @@ -1,20 +0,0 @@ -language: scala -env: - global: - - PUBLISH_JDK=oraclejdk8 - # PGP_PASSPHRASE - - secure: "BzgzRZLYa52rS/hBfzf43b++CfDhdcd3Mmu8tsyBHgThSQOd2YBLbV5kWD8aYVFKVHfW7XX0PTe3F+rR/fFZqGItE6o8Px0Y7Vzb5pqjlaQdxFEJ+WrsnshS0xuAKZ7OwVHRp+d+jznaCwRxEo2vpW3ko1OPAJ8cxfhVL/4C1I0=" - # SONA_USER - - secure: "lx2qFeFxh9AFmyHR7hH4Qf9flIEx8VgYj6ebzuxp1cc1ZZiXHC1256x0bHFDUH9bhJACOazOrco/+v6MBAriBkWxLBc98FrC6OkVeQMFW2ffWSBuHRclilKsQA/Lsgc81Wg+WV105hOqUNAkTXgroblInNt+KS+DhC/8FVoh9ZY=" - # SONA_PASS - - secure: "FZC+FZnBNeklA150vW5QDZJ5J7t+DExJrgyXWM46Wh0MobjH8cvydgC3qatItb0rDBV8l7zO1LDwl2KEi92aefw2a8E49z6qVOHgUXiI3SAx7M0UO0FFeKPmTXCLcBlbnGLcUqNjIZfuIEufQvPblKTl8qN4eMmcMn9jsNzJr28=" -script: - - admin/build.sh -scala: - - 2.10.4 - - 2.11.5 -jdk: - - oraclejdk8 -notifications: - email: - - jason.zaugg@typesafe.com diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..0511f21 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,7 @@ +all repositories in these organizations: + +* [scala](https://github.com/scala) +* [scalacenter](https://github.com/scalacenter) +* [lampepfl](https://github.com/lampepfl) + +are covered by the Scala Code of Conduct: https://scala-lang.org/conduct/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e80eb0f..93ace65 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,3 +1,3 @@ ## Contributing -This project has the same [guidelines for 
contributors](https://github.com/scala/scala/blob/master/CONTRIBUTING.md) as scala/scala. +This project has the same [guidelines for contributors](https://github.com/scala/scala/blob/2.13.x/CONTRIBUTING.md) as scala/scala. diff --git a/LICENSE b/LICENSE index ce51bda..261eeb9 100644 --- a/LICENSE +++ b/LICENSE @@ -1,28 +1,201 @@ -Copyright (c) 2002-2013 EPFL -Copyright (c) 2011-2013 Typesafe, Inc. - -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the EPFL nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000..a3069a2 --- /dev/null +++ b/NOTICE @@ -0,0 +1,14 @@ +scala-java8-compat +Copyright (c) 2002-2025 EPFL +Copyright (c) 2011-2025 Lightbend, Inc. dba Akka + +scala-java8-compat includes software developed at +LAMP/EPFL (https://lamp.epfl.ch/) and +Akka (https://akka.io/). + +Licensed under the Apache License, Version 2.0 (the "License"). +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/README.md b/README.md index 85d1e6f..15a28df 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,18 @@ -## scala-java8-compat [](https://travis-ci.org/scala/scala-java8-compat) [![Maven Central 2.11](https://maven-badges.herokuapp.com/maven-central/org.scala-lang.modules/scala-java8-compat_2.11/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.scala-lang.modules/scala-java8-compat_2.11) +# scala-java8-compat [](http://search.maven.org/#search%7Cga%7C1%7Cg%3Aorg.scala-lang.modules%20a%3Ascala-java8-compat_2.11) [](http://search.maven.org/#search%7Cga%7C1%7Cg%3Aorg.scala-lang.modules%20a%3Ascala-java8-compat_2.12) [](http://search.maven.org/#search%7Cga%7C1%7Cg%3Aorg.scala-lang.modules%20a%3Ascala-java8-compat_2.13) [](http://search.maven.org/#search%7Cga%7C1%7Cg%3Aorg.scala-lang.modules%20a%3Ascala-java8-compat_3) -A Java 8 compatibility kit for Scala. +A Java 8 compatibility kit for Scala 2.12 and 2.11. -The API is currently still experimental: we do not yet guarantee source or binary compatibility with future releases. +Javadoc is [here](https://javadoc.io/doc/org.scala-lang.modules/scala-java8-compat_2.13/latest/index.html). + +## Do you need this? + +If you are using Scala 2.13 or newer only, then don't use this library! Use the classes under `scala.jdk` instead; they were added to the standard library in 2.13. + +We do publish 2.13 and 3.0 artifacts of scala-java8-compat, but they're only intended to be used in projects which crossbuild with 2.12 and/or 2.11. + +## Maintenance status + +This library is community-maintained. (The Scala team at Akka provides infrastructure and oversight.) 
## Functional Interfaces for Scala functions @@ -25,9 +35,74 @@ class Test { [More Examples / Documentation](src/test/java/scala/compat/java8/LambdaTest.java) +## Converters between `scala.FunctionN` and `java.util.function` + +A set of converters that enable interconversion between Java's standard +Functional Interfaces defined in `java.util.function` and Scala's `Function0`, +`Function1`, and `Function2` traits. These are intended for use when you +already have an instance of a `java.util.function` and need a Scala function, +or have a Scala function and need an instance of a `java.util.function`. + +The `.asScala` extension method will convert a `java.util.function` to the corresponding +Scala function. The `.asJava` extension method will convert a Scala function to +the most specific corresponding Java functional interface. If you wish to obtain +a less specific functional interface, there are named methods that start with `asJava` +and continue with the name of the Java functional interface. For instance, the +most specific interface corresponding to the Scala function `val rev = (s: String) => s.reverse` +is `UnaryOperator[String]`, and that is what `rev.asJava` will produce. However, +`asJavaFunction(rev)` will return a `java.util.function.Function[String, String]` instead. + +The `asJava` methods can also be called conveniently from Java. There are additional +`asScalaFrom` methods (e.g. `asScalaFromUnaryOperator`) that will perform the +functional-interface-to-Scala-function conversion; this is primarily of use when calling +from Java since the `.asScala` extension method is more convenient in Scala. 
+ +#### Usage examples + +In Scala: + +```scala +import java.util.function._ +import scala.compat.java8.FunctionConverters._ + +val foo: Int => Boolean = i => i > 7 +def testBig(ip: IntPredicate) = ip.test(9) +println(testBig(foo.asJava)) // Prints true + +val bar = new UnaryOperator[String]{ def apply(s: String) = s.reverse } +List("cod", "herring").map(bar.asScala) // List("doc", "gnirreh") + +def testA[A](p: Predicate[A])(a: A) = p.test(a) +println(testA(asJavaPredicate(foo))(4)) // Prints false + +// println(testA(foo.asJava)(4)) <-- doesn't work +// IntPredicate does not extend Predicate! +``` + +In Java: + +```java +import java.util.function.*; +import scala.compat.java8.FunctionConverters; + +class Example { + String foo(UnaryOperator f) { + return f.apply("halibut"); + } + String bar(scala.Function1 f) { + return foo(FunctionConverters.asJavaUnaryOperator(f)); + } + String baz(Function f) { + return bar(FunctionConverters.asScalaFromFunction(f)); + } +} +``` + ## Converters between `scala.concurrent` and `java.util.concurrent` - - [API](src/main/java/scala/compat/java8/FutureConverters.java) +Conversion between Java's concurrency primitives (`CompletionStage` and `CompletableFuture`) and the Scala concurrency primitives (`Promise` and `Future`) is enabled with `scala.compat.java8.FutureConverters` singleton object: + + - [API](src/main/scala/scala/compat/java8/FutureConverters.scala) + - [Test Cases](src/test/java/scala/compat/java8/FutureConvertersTest.java) ## Converters between `scala.Option` and `java.util` classes `Optional`, `OptionalDouble`, `OptionalInt`, and `OptionalLong`. 
@@ -53,6 +128,169 @@ class Test { } ``` -## Future work - - Converters for `java.util.function`, `java.util.stream` - - [`Spliterator`](https://docs.oracle.com/javase/8/docs/api/java/util/Spliterator.html)s for Scala collections + +## Converters from Scala collections to Java 8 Streams + +Scala collections gain `seqStream` and `parStream` as extension methods that produce a Java 8 Stream +running sequentially or in parallel, respectively. These are automatically specialized to a primitive +type if possible, including automatically applied widening conversions. For instance, `List(1,2).seqStream` +produces an `IntStream`, and so does `List(1.toShort, 2.toShort).parStream`. Maps additionally have +`seqKeyStream`, `seqValueStream`, `parKeyStream`, and `parValueStream` methods. + +Scala collections also gain `accumulate` and `stepper` methods that produce utility collections that +can be useful when working with Java 8 Streams. `accumulate` produces an `Accumulator` or its primitive +counterpart (`DoubleAccumulator`, etc.), which is a low-level collection designed for efficient collection +and dispatching of results to and from Streams. Unlike most collections, it can contain more than +`Int.MaxValue` elements. + +`stepper` produces a `Stepper` which is a fusion of `Spliterator` and `Iterator`. `Stepper`s underlie the Scala +collections' instances of Java 8 Streams. Steppers are intended as low-level building blocks for streams. +Usually you would not create them directly or call their methods but you can implement them alongside custom +collections to get better performance when streaming from these collections. + +Java 8 Streams gain `toScala[Coll]` and `accumulate` methods, to make it easy to produce Scala collections +or Accumulators, respectively, from Java 8 Streams. For instance, `myStream.toScala[Vector]` will collect the +contents of a Stream into a `scala.collection.immutable.Vector`. 
Note that standard sequential builders +are used for collections, so this is best done to gather the results of an expensive computation. + +Finally, there is a Java class, `ScalaStreamSupport`, that has a series of `stream` methods that can be used to +obtain Java 8 Streams from Scala collections from within Java. + +#### Performance Considerations + +For sequential operations, Scala's `iterator` almost always equals or exceeds the performance of a Java 8 stream. Thus, +one should favor `iterator` (and its richer set of operations) over `seqStream` for general use. However, long +chains of processing of primitive types can sometimes benefit from the manually specialized methods in `DoubleStream`, +`IntStream`, and `LongStream`. + +Note that although `iterator` typically has superior performance in a sequential context, the advantage is modest +(usually less than 50% higher throughput for `iterator`). + +For parallel operations, `parStream` and even `seqStream.parallel` meets or exceeds the performance of Scala parallel +collections methods (invoked with `.par`). Especially for small collections, the difference can be substantial. In +some cases, when a Scala (parallel) collection is the ultimate result, Scala parallel collections can have an advantage +as the collection can (in some cases) be built in parallel. + +Because the wrappers are invoked based on the static type of the collection, there are also cases where parallelization +is inefficient when interfacing with Java 8 Streams (e.g. when a collection is typed as `Seq[String]` so might have linear +access like `List`, but actually is a `WrappedArray[String]` (`ArraySeq` on 2.13) that can be efficiently parallelized) but can be efficient +with Scala parallel collections. The `parStream` method is only available when the static type is known to be compatible +with rapid parallel operation; `seqStream` can be parallelized by using `.parallel`, but may or may not be efficient. 
+ +If the operations available on Java 8 Streams are sufficient, the collection type is known statically with enough precision +to enable parStream, and an `Accumulator` or non-collection type is an acceptable result, Java 8 Streams will essentially +always outperform the Scala parallel collections. + +#### Scala Usage Example + +```scala +import scala.compat.java8.StreamConverters._ + +object Test { + val m = collection.immutable.HashMap("fish" -> 2, "bird" -> 4) + val s = m.parValueStream.sum // 6, potentially computed in parallel + val t = m.seqKeyStream.toScala[List] // List("fish", "bird") + val a = m.accumulate // Accumulator[(String, Int)] + + val n = a.stepper.fold(0)(_ + _._1.length) + + a.parStream.count // 8 + 2 = 10 + + val b = java.util.Arrays.stream(Array(2L, 3L, 4L)). + accumulate // LongAccumulator + val l = b.to[List] // List(2L, 3L, 4L) +} +``` + +#### Using Java 8 Streams with Scala Function Converters + +Scala can emit Java SAMs for lambda expressions that are arguments to methods that take a Java SAM rather than +a Scala Function. However, it can be convenient to restrict the SAM interface to interactions with Java code +(including Java 8 Streams) rather than having it propagate throughout Scala code. + +Using Java 8 Stream converters together with function converters allows one to accomplish this with only a modest +amount of fuss. + +Example: + +```scala +import scala.compat.java8.FunctionConverters._ +import scala.compat.java8.StreamConverters._ + +def mapToSortedString[A](xs: Vector[A], f: A => String, sep: String) = + xs.parStream. // Creates java.util.stream.Stream[String] + map[String](f.asJava).sorted. 
// Maps A to String and sorts (in parallel) + toArray.mkString(sep) // Back to an Array to use Scala's mkString +``` + +Note that explicit creation of a new lambda will tend to lead to improved type inference and at least equal +performance: + +```scala +def mapToSortedString[A](xs: Vector[A], f: A => String, sep: String) = + xs.parStream. + map[String](a => f(a)).sorted. // Explicit lambda creates a SAM wrapper for f + toArray.mkString(sep) +``` + +#### Java Usage Example + +To convert a Scala collection to a Java 8 Stream from within Java, it usually +suffices to call `ScalaStreamSupport.stream(xs)` on your collection `xs`. If `xs` is +a map, you may wish to get the keys or values alone by using `fromKeys` or +`fromValues`. If the collection has an underlying representation that is not +efficiently parallelized (e.g. `scala.collection.immutable.List`), then +`fromAccumulated` (and `fromAccumulatedKeys` and `fromAccumulatedValues`) will +first gather the collection into an `Accumulator` and then return a stream over +that accumulator. If not running in parallel, `from` is preferable (faster and +less memory usage). + +Note that a Scala `Iterator` cannot fulfill the contract of a Java 8 Stream +(because it cannot support `trySplit` if it is called). Presently, one must +call `fromAccumulated` on the `Iterator` to cache it, even if the Stream will +be evaluated sequentially, or wrap it as a Java Iterator and use static +methods in `Spliterator` to wrap that as a `Spliterator` and then a `Stream`. 
+ +Here is an example of conversion of a Scala collection within Java 8: + +```java +import scala.collection.mutable.ArrayBuffer; +import scala.compat.java8.ScalaStreamSupport; + +public class StreamConvertersExample { + public int MakeAndUseArrayBuffer() { + ArrayBuffer ab = new ArrayBuffer(); + ab.$plus$eq("salmon"); + ab.$plus$eq("herring"); + return ScalaStreamSupport.stream(ab).mapToInt(x -> x.length()).sum(); // 6+7 = 13 + } +} +``` + +## Converters between `scala.concurrent.duration.FiniteDuration` and `java.time.Duration` + +Interconversion between Java's standard `java.time.Duration` type +and the `scala.concurrent.duration.FiniteDuration` types. The Java `Duration` does +not contain a time unit, so when converting from `FiniteDuration` the time unit used +to create it is lost. + +For the opposite conversion a `Duration` can potentially express a larger time span than +a `FiniteDuration`, for such cases an exception is thrown. + +Example of conversions from the Java type ways: + +```scala +import scala.concurrent.duration._ +import scala.compat.java8.DurationConverters._ + +val javaDuration: java.time.Duration = 5.seconds.toJava +val finiteDuration: FiniteDuration = javaDuration.toScala +``` + +From Java: +```java +import scala.compat.java8.DurationConverters; +import scala.concurrent.duration.FiniteDuration; + +DurationConverters.toScala(Duration.of(5, ChronoUnit.SECONDS)); +DurationConverters.toJava(FiniteDuration.create(5, TimeUnit.SECONDS)); +``` diff --git a/admin/README.md b/admin/README.md deleted file mode 100644 index 3b38f19..0000000 --- a/admin/README.md +++ /dev/null @@ -1,60 +0,0 @@ -## Tag Driven Releasing - -### Background Reading - - - http://docs.travis-ci.com/user/environment-variables/ - - http://docs.travis-ci.com/user/encryption-keys/ - - http://docs.travis-ci.com/user/encrypting-files/ - -### Initial setup for the repository - -To configure tag driven releases from Travis CI. - - 1. 
Generate a key pair for this repository with `./admin/genKeyPair.sh`. - Edit `.travis.yml` and `admin/build.sh` as prompted. - 2. Publish the public key to https://pgp.mit.edu - 3. Store other secrets as encrypted environment variables with `admin/encryptEnvVars.sh`. - Edit `.travis.yml` as prompted. - 4. Edit `.travis.yml` to use `./admin/build.sh` as the build script, - and edit that script to use the tasks required for this project. - 5. Edit `.travis.yml` to select which JDK will be used for publishing. - -It is important to add comments in .travis.yml to identify the name -of each environment variable encoded in a `:secure` section. - -After all of these steps, your .travis.yml should contain config of the -form: - - language: scala - env: - global: - - PUBLISH_JDK=openjdk6 - # PGP_PASSPHRASE - - secure: "XXXXXX" - # SONA_USER - - secure: "XXXXXX" - # SONA_PASS - - secure: "XXXXXX" - script: - - admin/build.sh - -If Sonatype credentials change in the future, step 3 can be repeated -without generating a new key. - -Be sure to use SBT 0.13.7 or higher to avoid [#1430](https://github.com/sbt/sbt/issues/1430)! - -### Testing - - 1. Follow the release process below to create a dummy release (e.g. 0.1.0-TEST1). - Confirm that the release was staged to Sonatype but do not release it to Maven - central. Instead, drop the staging repository. - -### Performing a release - - 1. Create a GitHub "Release" (with a corresponding tag) via the GitHub - web interface. - 2. Travis CI will schedule a build for this release. Review the build logs. - 3. Log into https://oss.sonatype.org/ and identify the staging repository. - 4. Sanity check its contents - 5. Release staging repository to Maven and send out release announcement. 
- diff --git a/admin/build.sh b/admin/build.sh deleted file mode 100755 index b67abcf..0000000 --- a/admin/build.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -set -e - -# prep environment for publish to sonatype staging if the HEAD commit is tagged - -# git on travis does not fetch tags, but we have TRAVIS_TAG -# headTag=$(git describe --exact-match ||:) - -if [ "$TRAVIS_JDK_VERSION" == "$PUBLISH_JDK" ] && [[ "$TRAVIS_TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+(-[A-Za-z0-9-]+)? ]]; then - echo "Going to release from tag $TRAVIS_TAG!" - myVer=$(echo $TRAVIS_TAG | sed -e s/^v// | sed -e 's/_[0-9]*\.[0-9]*//') - publishVersion='set every version := "'$myVer'"' - extraTarget="publish-signed" - cat admin/gpg.sbt >> project/plugins.sbt - cp admin/publish-settings.sbt . - - # Copied from the output of genKeyPair.sh - K=$encrypted_1ce132863fa7_key - IV=$encrypted_1ce132863fa7_iv - - openssl aes-256-cbc -K $K -iv $IV -in admin/secring.asc.enc -out admin/secring.asc -d -fi - -sbt ++$TRAVIS_SCALA_VERSION "$publishVersion" clean update test publishLocal $extraTarget diff --git a/admin/encryptEnvVars.sh b/admin/encryptEnvVars.sh deleted file mode 100755 index b625667..0000000 --- a/admin/encryptEnvVars.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -# -# Encrypt sonatype credentials so that they can be -# decrypted in trusted builds on Travis CI. -# -set -e - -read -s -p 'SONA_USER: ' SONA_USER -travis encrypt SONA_USER="$SONA_USER" -read -s -p 'SONA_PASS: ' SONA_PASS -travis encrypt SONA_PASS="$SONA_PASS" diff --git a/admin/genKeyPair.sh b/admin/genKeyPair.sh deleted file mode 100755 index 11f7a1e..0000000 --- a/admin/genKeyPair.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash -# -# Generates a key pair for this repository to sign artifacts. -# Encrypt the private key and its passphrase in trusted builds -# on Travis CI. 
-# -set -e - -# Based on https://gist.github.com/kzap/5819745: -function promptDelete() { - if [[ -f "$1" ]]; then - echo About to delete $1, Enter for okay / CTRL-C to cancel - read - rm "$1" - fi -} -for f in admin/secring.asc.enc admin/secring.asc admin/pubring.asc; do promptDelete "$f"; done - -echo Generating key pair. Please enter 1. repo name 2. scala-internals@googlegroups.com, 3. a new passphrase -cp admin/gpg.sbt project -sbt 'set pgpReadOnly := false' \ - 'set pgpPublicRing := file("admin/pubring.asc")' \ - 'set pgpSecretRing := file("admin/secring.asc")' \ - 'pgp-cmd gen-key' -rm project/gpg.sbt - -echo ============================================================================================ -echo Encrypting admin/secring.asc. Update K and IV variables in admin/build.sh accordingly. -echo ============================================================================================ -travis encrypt-file admin/secring.asc -rm admin/secring.asc -mv secring.asc.enc admin - -echo ============================================================================================ -echo Encrypting environment variables. Add each to a line in .travis.yml. 
Include a comment -echo with the name of the corresponding variable -echo ============================================================================================ -read -s -p 'PGP_PASSPHRASE: ' PGP_PASSPHRASE -travis encrypt PGP_PASSPHRASE="$PGP_PASSPHRASE" - diff --git a/admin/gpg.sbt b/admin/gpg.sbt deleted file mode 100644 index d60e366..0000000 --- a/admin/gpg.sbt +++ /dev/null @@ -1,2 +0,0 @@ - -addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.3") // only added when publishing: diff --git a/admin/publish-settings.sbt b/admin/publish-settings.sbt deleted file mode 100644 index f763ea0..0000000 --- a/admin/publish-settings.sbt +++ /dev/null @@ -1,9 +0,0 @@ -def env(key: String) = Option(System.getenv(key)).getOrElse("") - -pgpPassphrase := Some(env("PGP_PASSPHRASE").toArray) - -pgpPublicRing := file("admin/pubring.asc") - -pgpSecretRing := file("admin/secring.asc") - -credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) diff --git a/admin/pubring.asc b/admin/pubring.asc deleted file mode 100644 index 5d5dd87..0000000 --- a/admin/pubring.asc +++ /dev/null @@ -1,18 +0,0 @@ ------BEGIN PGP PUBLIC KEY BLOCK----- -Version: BCPG v1.49 - -mQENBFS1xA0BCAC0t2c5MhkWyUbkWsZM4DmIN+/pDjNCr2DNmbIG3gB8i4MI71q/ -fj+Ob0lemjJNnNc4ii6+s9RrOcwR1EU4IA8mO79NN+i2yVUhe0LmOWgyfXvG8Qpg -hLmdMrkgOHK0hpWbXJ0i2NGPch4gI6YRJF95yLojz2KENmiYGmSD8p1It06O2824 -Xhqc5Cm72/qXvonHP1+MugjiPxmyZN3ajSol0P7tZlgB7ikqpyL3kZXkc162bJ+H -U6y6qUCcQqS5VQ7Fv9bIbTNOjN4ELLJn2ffLVe3ujRG6seioL0MfuQ/gV9IpGcGO -Dew8Xu79QdDyVHQKgDy9N/J276JZ4j9nYCCxABEBAAG0NXNjYWxhLWphdmE4LWNv -bXBhdCA8c2NhbGEtaW50ZXJuYWxzQGdvb2dsZWdyb3Vwcy5jb20+iQEcBBMBAgAG -BQJUtcQNAAoJEGQWNEmlKase8pAH/Rb45Px88u7DDT53DU68zh84oDZLv9i46g7g -16KI97nz17F9OEHdkzNEUA3EgCD1d2k+c/GIdQKg3avVdpNM7krK5SSNgHKcwe/F -0YGMxvh+LgeK1JDuXFbwLJKR+7VIGVKkjw+Z2TC8hZfnD6Qy6c4xkukoBs6yfWQO -tf8gSH6oQox4UIOB/+ADyypl9mnRxgdi1uPvd6UJnL/n9UDE8v1k+8WzO34nTVZr 
-xWN28pAun5VpLuEq4GAr2JRfRiF+N0hGuS+htiU6hnO81BBK+NusWxI9Aitu8Zyh -eulWpROXvUOw1eJequutgyGwEEQkRi+Yu+2eSM2/EPCWiLXkODk= -=Qro7 ------END PGP PUBLIC KEY BLOCK----- diff --git a/admin/secring.asc.enc b/admin/secring.asc.enc deleted file mode 100644 index bc600f9..0000000 Binary files a/admin/secring.asc.enc and /dev/null differ diff --git a/benchmark/README.md b/benchmark/README.md new file mode 100644 index 0000000..2d6e4e9 --- /dev/null +++ b/benchmark/README.md @@ -0,0 +1,27 @@ +# Benchmark suite for Java 8 Streams compatibility layer + +This project is intended to support semi-manual benchmarking of the Java 8 streams compatibility layer in Scala collections. + +Because the benchmarking is **very computationally expensive** it should be done occasionally, not automatically. + +## Code generation step + +1. Make sure the parent project has been built by running `sbt package` in it. + +2. `cd` to the benchmark project and run `sbt generateJmh` + +## Benchmarking step + +1. Make sure your terminal has plenty of lines of scrollback. (A couple thousand should do.) + +2. Run `sbt "jmh:run -i 5 -wi 3 -f 5"`. Wait overnight. + +3. Clip off the last set of lines from the terminal window starting before the line that contains `[info] # Run complete. Total time:` and including that line until the end. + +4. Save that in the file `results/jmhbench.log` + +## Comparison step + +1. Run `sbt parseJmh` + +2. Look at the ASCII art results showing speed comparisons. 
diff --git a/benchmark/build.sbt b/benchmark/build.sbt new file mode 100644 index 0000000..393b763 --- /dev/null +++ b/benchmark/build.sbt @@ -0,0 +1,17 @@ +enablePlugins(JmhPlugin) + +val generateJmh = TaskKey[Unit]("generateJmh", "Generates JMH benchmark sources.") +val parseJmh = TaskKey[Unit]("parseJmh", "Parses JMH benchmark logs in results/jmhbench.log.") + +lazy val root = (project in file(".")).settings( + name := "java8-compat-bench", + scalaVersion := "2.11.12", + crossScalaVersions := List("2.11.12" /* TODO, "2.12.0-M4"*/), + organization := "org.scala-lang.modules", + version := "0.6.0-SNAPSHOT", + unmanagedJars in Compile ++= Seq(baseDirectory.value / "../target/scala-2.11/scala-java8-compat_2.11-0.9.0-SNAPSHOT.jar"), + // This would be nicer but sbt-jmh doesn't like it: + //unmanagedClasspath in Compile += Attributed.blank(baseDirectory.value / "../target/scala-2.11/classes"), + generateJmh := (runMain in Compile).toTask(" bench.codegen.GenJmhBench").value, + parseJmh := (runMain in Compile).toTask(" bench.examine.ParseJmhLog").value +) diff --git a/benchmark/project/plugins.sbt b/benchmark/project/plugins.sbt new file mode 100644 index 0000000..f5319fb --- /dev/null +++ b/benchmark/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.6") diff --git a/benchmark/results/jmhbench.graphs.ascii b/benchmark/results/jmhbench.graphs.ascii new file mode 100644 index 0000000..b6f4c9d --- /dev/null +++ b/benchmark/results/jmhbench.graphs.ascii @@ -0,0 +1,2537 @@ +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +filter/map/sum trio +10 elements +collection.mutable.ArrayBuffer + Int, base collection ##= 0.112 +- 9.4 % + Int, iterator on coll #### 0.160 +- 14.7 % + Int, serial stream ###- 0.129 +- 9.6 % + +filter/map/sum trio +10 elements +Array + Int, base collection ######################### 1.000 +- 0.2 % + Int, iterator on coll ###= 0.150 +- 10.1 % + Int, serial stream #####= 0.223 +- 6.0 % + 
+filter/map/sum trio +10 elements +collection.mutable.ArraySeq + Int, base collection #= 0.068 +- 12.4 % + Int, iterator on coll ##### 0.197 +- 11.1 % + Int, serial stream ### 0.122 +- 8.4 % + +filter/map/sum trio +10 elements +collection.mutable.ArrayStack + Int, base collection ## 0.081 +- 5.1 % + Int, iterator on coll ####- 0.171 +- 1.4 % + Int, serial stream ##= 0.113 +- 7.8 % + +filter/map/sum trio +10 elements +collection.immutable.ListSet + Int, base collection # 0.037 +- 15.6 % + Int, iterator on coll ####= 0.185 +- 15.0 % + Int, serial stream ##- 0.091 +- 6.1 % + +filter/map/sum trio +10 elements +collection.immutable.HashSet + Int, base collection #= 0.069 +- 13.0 % + Int, iterator on coll ###- 0.132 +- 14.3 % + Int, serial stream ##= 0.111 +- 10.1 % + +filter/map/sum trio +10 elements +java.util.ArrayList + Int, serial stream ###= 0.144 +- 7.1 % + +filter/map/sum trio +10 elements +java.util.LinkedList + Int, serial stream ###= 0.141 +- 3.2 % + +filter/map/sum trio +10 elements +collection.mutable.LinkedHashSet + Int, base collection ## 0.081 +- 17.3 % + Int, iterator on coll ####= 0.189 +- 0.6 % + Int, serial stream ### 0.114 +- 0.8 % + +filter/map/sum trio +10 elements +collection.immutable.List + Int, base collection ##- 0.099 +- 6.1 % + Int, iterator on coll #### 0.165 +- 20.9 % + Int, serial stream ##= 0.104 +- 11.6 % + +filter/map/sum trio +10 elements +collection.mutable.HashSet + Int, base collection #- 0.060 +- 1.1 % + Int, iterator on coll ##= 0.106 +- 1.2 % + Int, serial stream ##= 0.101 +- 6.6 % + +filter/map/sum trio +10 elements +collection.mutable.Queue + Int, base collection # 0.035 +- 13.4 % + Int, iterator on coll ##= 0.106 +- 0.8 % + Int, serial stream # 0.046 +- 7.3 % + +filter/map/sum trio +10 elements +collection.mutable.PriorityQueue + Int, base collection ##- 0.092 +- 3.6 % + Int, iterator on coll ####- 0.174 +- 12.0 % + Int, serial stream ### 0.118 +- 7.8 % + +filter/map/sum trio +10 elements +collection.immutable.Queue + Int, 
base collection #= 0.069 +- 1.7 % + Int, iterator on coll ### 0.124 +- 21.0 % + Int, serial stream ##= 0.102 +- 3.2 % + +filter/map/sum trio +10 elements +collection.immutable.Stream + Int, base collection ## 0.075 +- 5.5 % + Int, iterator on coll #- 0.056 +- 4.7 % + Int, serial stream ##= 0.102 +- 15.3 % + +filter/map/sum trio +10 elements +collection.immutable.TreeSet + Int, base collection #= 0.069 +- 3.9 % + Int, iterator on coll ### 0.125 +- 3.5 % + Int, serial stream ##= 0.108 +- 1.4 % + +filter/map/sum trio +10 elements +collection.immutable.Vector + Int, base collection ##- 0.093 +- 2.9 % + Int, iterator on coll #### 0.164 +- 1.2 % + Int, serial stream ###- 0.127 +- 2.7 % + +filter/map/sum trio +10 elements +collection.mutable.WrappedArray + Int, base collection ##- 0.095 +- 2.1 % + Int, iterator on coll ###- 0.128 +- 0.6 % + Int, serial stream ###- 0.129 +- 4.9 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +filter/map/sum trio +10000 elements +collection.mutable.ArrayBuffer + Int, base collection #####= 0.225 +- 0.7 % + Int, iterator on coll ######- 0.258 +- 0.4 % + Int, serial stream ##= 0.101 +- 7.9 % + +filter/map/sum trio +10000 elements +Array + Int, base collection ######################### 1.000 +- 0.1 % + Int, iterator on coll #### 0.165 +- 2.9 % + Int, serial stream ####################= 0.833 +- 1.8 % + +filter/map/sum trio +10000 elements +collection.mutable.ArraySeq + Int, base collection ####= 0.181 +- 3.2 % + Int, iterator on coll ######- 0.258 +- 0.7 % + Int, serial stream ##- 0.098 +- 8.6 % + +filter/map/sum trio +10000 elements +collection.mutable.ArrayStack + Int, base collection ####= 0.187 +- 1.3 % + Int, iterator on coll ######= 0.273 +- 0.5 % + Int, serial stream ##= 0.109 +- 5.0 % + +filter/map/sum trio +10000 elements +collection.immutable.ListSet + Int, base collection 0.005 +- 5.2 % + Int, iterator on coll #####- 0.214 +- 1.3 % + Int, serial stream ##- 0.099 +- 3.6 % + +filter/map/sum trio 
+10000 elements +collection.immutable.HashSet + Int, base collection # 0.037 +- 9.0 % + Int, iterator on coll ##= 0.107 +- 0.6 % + Int, serial stream ##- 0.092 +- 0.9 % + +filter/map/sum trio +10000 elements +java.util.ArrayList + Int, serial stream ######- 0.256 +- 5.5 % + +filter/map/sum trio +10000 elements +java.util.LinkedList + Int, serial stream ####### 0.280 +- 3.4 % + +filter/map/sum trio +10000 elements +collection.mutable.LinkedHashSet + Int, base collection ## 0.082 +- 4.3 % + Int, iterator on coll ######= 0.261 +- 2.9 % + Int, serial stream ##= 0.112 +- 6.3 % + +filter/map/sum trio +10000 elements +collection.immutable.List + Int, base collection ###- 0.135 +- 1.8 % + Int, iterator on coll #####- 0.215 +- 1.6 % + Int, serial stream #= 0.060 +- 1.6 % + +filter/map/sum trio +10000 elements +collection.mutable.HashSet + Int, base collection # 0.046 +- 2.7 % + Int, iterator on coll ##= 0.111 +- 1.6 % + Int, serial stream ##= 0.109 +- 2.1 % + +filter/map/sum trio +10000 elements +collection.mutable.Queue + Int, base collection #- 0.059 +- 1.1 % + Int, iterator on coll ##### 0.204 +- 1.2 % + Int, serial stream #- 0.051 +- 2.4 % + +filter/map/sum trio +10000 elements +collection.mutable.PriorityQueue + Int, base collection ##= 0.103 +- 9.3 % + Int, iterator on coll ######- 0.248 +- 0.6 % + Int, serial stream ##- 0.088 +- 4.4 % + +filter/map/sum trio +10000 elements +collection.immutable.Queue + Int, base collection ## 0.079 +- 0.7 % + Int, iterator on coll #####- 0.211 +- 2.5 % + Int, serial stream ##= 0.113 +- 1.6 % + +filter/map/sum trio +10000 elements +collection.immutable.Stream + Int, base collection ##- 0.087 +- 2.4 % + Int, iterator on coll #- 0.050 +- 6.6 % + Int, serial stream ##- 0.089 +- 31.9 % + +filter/map/sum trio +10000 elements +collection.immutable.TreeSet + Int, base collection = 0.023 +- 2.5 % + Int, iterator on coll ###= 0.146 +- 1.6 % + Int, serial stream ## 0.083 +- 2.6 % + +filter/map/sum trio +10000 elements 
+collection.immutable.Vector + Int, base collection #### 0.159 +- 5.3 % + Int, iterator on coll ##### 0.206 +- 0.9 % + Int, serial stream ##= 0.104 +- 18.7 % + +filter/map/sum trio +10000 elements +collection.mutable.WrappedArray + Int, base collection ### 0.125 +- 1.9 % + Int, iterator on coll #### 0.157 +- 3.4 % + Int, serial stream ##- 0.091 +- 8.9 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +filter/map/sum trio +10 elements +collection.mutable.ArrayBuffer + String, base collection #### 0.153 +- 0.6 % + String, iterator on coll #########= 0.381 +- 1.0 % + String, serial stream ###- 0.138 +- 10.2 % + +filter/map/sum trio +10 elements +Array + String, base collection ######################### 1.000 +- 0.1 % + String, iterator on coll #########= 0.389 +- 1.0 % + String, serial stream ###= 0.142 +- 8.6 % + +filter/map/sum trio +10 elements +collection.mutable.ArraySeq + String, base collection ##- 0.094 +- 4.2 % + String, iterator on coll #########= 0.382 +- 0.4 % + String, serial stream ###- 0.136 +- 10.0 % + +filter/map/sum trio +10 elements +collection.mutable.ArrayStack + String, base collection ### 0.124 +- 1.3 % + String, iterator on coll ###########= 0.473 +- 1.2 % + String, serial stream ### 0.125 +- 10.5 % + +filter/map/sum trio +10 elements +collection.immutable.ListSet + String, base collection #- 0.054 +- 4.9 % + String, iterator on coll ######= 0.265 +- 1.2 % + String, serial stream ##= 0.102 +- 3.1 % + +filter/map/sum trio +10 elements +collection.immutable.HashSet + String, base collection ##- 0.093 +- 3.1 % + String, iterator on coll ##### 0.205 +- 2.7 % + String, serial stream ##- 0.097 +- 2.9 % + +filter/map/sum trio +10 elements +java.util.ArrayList + String, serial stream #### 0.159 +- 6.3 % + +filter/map/sum trio +10 elements +java.util.LinkedList + String, serial stream ###= 0.150 +- 4.1 % + +filter/map/sum trio +10 elements +collection.mutable.LinkedHashSet + String, base collection ###- 0.134 +- 1.4 % + 
String, iterator on coll ##########- 0.419 +- 0.7 % + String, serial stream ### 0.125 +- 10.9 % + +filter/map/sum trio +10 elements +collection.immutable.List + String, base collection ###- 0.138 +- 3.0 % + String, iterator on coll ########## 0.401 +- 1.1 % + String, serial stream ##= 0.105 +- 17.2 % + +filter/map/sum trio +10 elements +collection.mutable.HashSet + String, base collection ##= 0.102 +- 2.1 % + String, iterator on coll ######- 0.252 +- 0.6 % + String, serial stream ##- 0.091 +- 20.6 % + +filter/map/sum trio +10 elements +collection.mutable.Queue + String, base collection # 0.035 +- 1.9 % + String, iterator on coll ####- 0.178 +- 1.6 % + String, serial stream #- 0.058 +- 5.1 % + +filter/map/sum trio +10 elements +collection.mutable.PriorityQueue + String, base collection ### 0.117 +- 1.0 % + String, iterator on coll ########## 0.398 +- 2.4 % + String, serial stream ### 0.120 +- 10.1 % + +filter/map/sum trio +10 elements +collection.immutable.Queue + String, base collection ##= 0.102 +- 4.6 % + String, iterator on coll ######### 0.357 +- 1.5 % + String, serial stream ##- 0.097 +- 11.5 % + +filter/map/sum trio +10 elements +collection.immutable.Stream + String, base collection ##= 0.101 +- 7.1 % + String, iterator on coll #= 0.073 +- 5.8 % + String, serial stream ##= 0.110 +- 12.1 % + +filter/map/sum trio +10 elements +collection.immutable.TreeSet + String, base collection ##- 0.097 +- 1.6 % + String, iterator on coll ##### 0.194 +- 5.2 % + String, serial stream ##= 0.102 +- 7.8 % + +filter/map/sum trio +10 elements +collection.immutable.Vector + String, base collection ### 0.126 +- 0.8 % + String, iterator on coll #######- 0.299 +- 7.3 % + String, serial stream ### 0.123 +- 12.2 % + +filter/map/sum trio +10 elements +collection.mutable.WrappedArray + String, base collection ### 0.123 +- 1.1 % + String, iterator on coll ##########- 0.409 +- 0.4 % + String, serial stream ###- 0.135 +- 9.9 % 
+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +filter/map/sum trio +10000 elements +collection.mutable.ArrayBuffer + String, base collection ########- 0.328 +- 1.2 % + String, iterator on coll ############# 0.518 +- 0.6 % + String, serial stream ##########= 0.427 +- 1.0 % + +filter/map/sum trio +10000 elements +Array + String, base collection ######################### 1.000 +- 0.8 % + String, iterator on coll #############= 0.548 +- 0.9 % + String, serial stream ########### 0.442 +- 13.1 % + +filter/map/sum trio +10000 elements +collection.mutable.ArraySeq + String, base collection #######- 0.292 +- 5.3 % + String, iterator on coll ############# 0.523 +- 0.6 % + String, serial stream ########= 0.342 +- 17.6 % + +filter/map/sum trio +10000 elements +collection.mutable.ArrayStack + String, base collection ######## 0.325 +- 1.8 % + String, iterator on coll ############### 0.600 +- 0.8 % + String, serial stream ########- 0.338 +- 15.3 % + +filter/map/sum trio +10000 elements +collection.immutable.ListSet + String, base collection 0.003 +- 8.8 % + String, iterator on coll ############# 0.516 +- 1.7 % + String, serial stream #######= 0.306 +- 12.0 % + +filter/map/sum trio +10000 elements +collection.immutable.HashSet + String, base collection #- 0.054 +- 1.3 % + String, iterator on coll ### 0.118 +- 0.8 % + String, serial stream ##= 0.103 +- 0.5 % + +filter/map/sum trio +10000 elements +java.util.ArrayList + String, serial stream ############ 0.478 +- 8.2 % + +filter/map/sum trio +10000 elements +java.util.LinkedList + String, serial stream ############- 0.496 +- 8.7 % + +filter/map/sum trio +10000 elements +collection.mutable.LinkedHashSet + String, base collection ##- 0.091 +- 0.5 % + String, iterator on coll #############= 0.545 +- 1.1 % + String, serial stream #########= 0.392 +- 10.3 % + +filter/map/sum trio +10000 elements +collection.immutable.List + String, base collection ####- 0.175 +- 3.4 % + String, iterator on coll 
############= 0.511 +- 2.1 % + String, serial stream #######= 0.301 +- 0.7 % + +filter/map/sum trio +10000 elements +collection.mutable.HashSet + String, base collection #- 0.047 +- 0.6 % + String, iterator on coll ##= 0.108 +- 0.5 % + String, serial stream ### 0.117 +- 0.3 % + +filter/map/sum trio +10000 elements +collection.mutable.Queue + String, base collection # 0.046 +- 0.7 % + String, iterator on coll ###########= 0.461 +- 1.3 % + String, serial stream ##- 0.097 +- 1.7 % + +filter/map/sum trio +10000 elements +collection.mutable.PriorityQueue + String, base collection ### 0.115 +- 10.9 % + String, iterator on coll #########- 0.376 +- 2.2 % + String, serial stream ###### 0.245 +- 1.0 % + +filter/map/sum trio +10000 elements +collection.immutable.Queue + String, base collection ###- 0.135 +- 2.0 % + String, iterator on coll ############# 0.516 +- 0.9 % + String, serial stream #####= 0.220 +- 2.2 % + +filter/map/sum trio +10000 elements +collection.immutable.Stream + String, base collection ### 0.120 +- 2.5 % + String, iterator on coll ## 0.086 +- 6.3 % + String, serial stream ###### 0.235 +- 5.3 % + +filter/map/sum trio +10000 elements +collection.immutable.TreeSet + String, base collection = 0.024 +- 0.9 % + String, iterator on coll ###= 0.146 +- 3.4 % + String, serial stream ###= 0.148 +- 0.6 % + +filter/map/sum trio +10000 elements +collection.immutable.Vector + String, base collection #####- 0.207 +- 2.9 % + String, iterator on coll #########= 0.382 +- 1.8 % + String, serial stream ######### 0.353 +- 24.5 % + +filter/map/sum trio +10000 elements +collection.mutable.WrappedArray + String, base collection ########- 0.334 +- 2.4 % + String, iterator on coll #############= 0.542 +- 0.9 % + String, serial stream #########- 0.376 +- 18.7 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow trig on hashCode +10 elements +collection.mutable.ArrayBuffer + String, base collection ########################= 0.982 +- 0.1 % + String, 
iterator on coll ########################= 0.993 +- 0.2 % + String, serial stream ########################= 0.989 +- 0.3 % + String Stepper (can par) ######################### 0.996 +- 0.2 % + +slow trig on hashCode +10 elements +Array + String, base collection ######################### 1.000 +- 0.1 % + String, iterator on coll ########################= 0.991 +- 0.2 % + String, serial stream ########################= 0.990 +- 0.3 % + String Stepper (can par) ######################### 0.996 +- 0.2 % + +slow trig on hashCode +10 elements +collection.mutable.ArraySeq + String, base collection #################= 0.704 +- 0.2 % + String, iterator on coll ########################= 0.993 +- 0.2 % + String, serial stream ########################= 0.989 +- 0.3 % + String Stepper (can par) ######################### 0.995 +- 0.2 % + +slow trig on hashCode +10 elements +collection.mutable.ArrayStack + String, base collection #################= 0.701 +- 0.2 % + String, iterator on coll ########################= 0.988 +- 0.3 % + String, serial stream ########################= 0.988 +- 0.2 % + String Stepper (seq only) ######################### 0.994 +- 0.2 % + +slow trig on hashCode +10 elements +collection.immutable.ListSet + String, base collection #######################- 0.935 +- 0.2 % + String, iterator on coll ######################### 0.994 +- 0.2 % + String, serial stream ########################= 0.985 +- 0.9 % + String Stepper (seq only) ########################= 0.992 +- 0.2 % + +slow trig on hashCode +10 elements +collection.immutable.HashSet + String, base collection #################- 0.689 +- 0.2 % + String, iterator on coll ########################= 0.989 +- 0.2 % + String, serial stream ########################= 0.991 +- 0.2 % + String Stepper (can par) ########################= 0.992 +- 0.2 % + +slow trig on hashCode +10 elements +java.util.ArrayList + String, serial stream ######################### 0.995 +- 0.3 % + +slow trig on hashCode +10 elements 
+java.util.LinkedList + String, serial stream ######################### 0.997 +- 0.2 % + +slow trig on hashCode +10 elements +collection.mutable.LinkedHashSet + String, base collection ######################## 0.960 +- 0.4 % + String, iterator on coll ########################= 0.987 +- 0.3 % + String, serial stream ########################= 0.992 +- 0.2 % + String Stepper (seq only) ########################= 0.993 +- 0.4 % + +slow trig on hashCode +10 elements +collection.immutable.List + String, base collection ########################= 0.985 +- 0.2 % + String, iterator on coll ######################### 0.993 +- 0.2 % + String, serial stream ########################= 0.993 +- 0.2 % + String Stepper (seq only) ######################### 0.994 +- 0.3 % + +slow trig on hashCode +10 elements +collection.mutable.HashSet + String, base collection ######################## 0.960 +- 0.3 % + String, iterator on coll ########################= 0.984 +- 0.2 % + String, serial stream ########################= 0.989 +- 0.2 % + String Stepper (can par) ########################= 0.992 +- 0.2 % + +slow trig on hashCode +10 elements +collection.mutable.Queue + String, base collection #######################- 0.927 +- 0.4 % + String, iterator on coll ######################### 0.994 +- 0.3 % + String, serial stream ########################- 0.972 +- 0.3 % + String Stepper (seq only) ########################- 0.973 +- 0.2 % + +slow trig on hashCode +10 elements +collection.mutable.PriorityQueue + String, base collection ########################= 0.983 +- 0.2 % + String, iterator on coll ######################### 0.993 +- 0.3 % + String, serial stream ########################= 0.989 +- 0.4 % + String Stepper (seq only) ######################### 0.995 +- 0.2 % + +slow trig on hashCode +10 elements +collection.immutable.Queue + String, base collection ######################## 0.966 +- 0.2 % + String, iterator on coll ######################### 0.994 +- 0.2 % + String, serial stream 
########################= 0.986 +- 0.5 % + String Stepper (seq only) ######################### 0.993 +- 0.2 % + +slow trig on hashCode +10 elements +collection.immutable.Stream + String, base collection ######################## 0.956 +- 0.2 % + String, iterator on coll ########################- 0.975 +- 0.2 % + String, serial stream ########################= 0.987 +- 0.5 % + String Stepper (seq only) ######################### 0.994 +- 0.6 % + +slow trig on hashCode +10 elements +collection.immutable.TreeSet + String, base collection ################# 0.682 +- 0.2 % + String, iterator on coll ########################= 0.992 +- 0.2 % + String, serial stream ########################= 0.991 +- 0.2 % + String Stepper (seq only) ########################= 0.993 +- 0.3 % + +slow trig on hashCode +10 elements +collection.immutable.Vector + String, base collection ########################= 0.982 +- 0.2 % + String, iterator on coll ######################### 0.994 +- 0.2 % + String, serial stream ########################= 0.988 +- 0.3 % + String Stepper (can par) ########################= 0.991 +- 0.5 % + +slow trig on hashCode +10 elements +collection.mutable.WrappedArray + String, base collection ########################= 0.983 +- 0.2 % + String, iterator on coll ########################= 0.993 +- 0.2 % + String, serial stream ########################= 0.988 +- 0.4 % + String Stepper (can par) ######################### 0.995 +- 0.2 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow trig on hashCode +10000 elements +collection.mutable.ArrayBuffer + String, base collection ########################= 0.989 +- 0.2 % + String, iterator on coll ########################= 0.993 +- 0.2 % + String, serial stream ######################### 0.995 +- 0.2 % + String Stepper (can par) ######################### 0.995 +- 0.2 % + +slow trig on hashCode +10000 elements +Array + String, base collection ######################### 1.000 +- 0.1 % + String, 
iterator on coll ########################= 0.992 +- 0.2 % + String, serial stream ######################### 0.996 +- 0.1 % + String Stepper (can par) ######################### 0.996 +- 0.3 % + +slow trig on hashCode +10000 elements +collection.mutable.ArraySeq + String, base collection ##################### 0.839 +- 11.2 % + String, iterator on coll ########################= 0.992 +- 0.2 % + String, serial stream ######################### 0.994 +- 0.2 % + String Stepper (can par) ######################### 0.996 +- 0.2 % + +slow trig on hashCode +10000 elements +collection.mutable.ArrayStack + String, base collection #################= 0.713 +- 0.2 % + String, iterator on coll ########################= 0.993 +- 0.2 % + String, serial stream ######################### 0.994 +- 0.2 % + String Stepper (seq only) ######################### 0.995 +- 0.2 % + +slow trig on hashCode +10000 elements +collection.immutable.ListSet + String, base collection ######################= 0.913 +- 0.3 % + String, iterator on coll ########################= 0.992 +- 0.2 % + String, serial stream ########################= 0.991 +- 0.2 % + String Stepper (seq only) ########################= 0.986 +- 1.0 % + +slow trig on hashCode +10000 elements +collection.immutable.HashSet + String, base collection ###############= 0.627 +- 0.4 % + String, iterator on coll ########################= 0.982 +- 0.2 % + String, serial stream ########################- 0.970 +- 0.1 % + String Stepper (can par) ########################= 0.981 +- 0.5 % + +slow trig on hashCode +10000 elements +java.util.ArrayList + String, serial stream ######################### 0.996 +- 0.1 % + +slow trig on hashCode +10000 elements +java.util.LinkedList + String, serial stream ######################### 0.996 +- 0.1 % + +slow trig on hashCode +10000 elements +collection.mutable.LinkedHashSet + String, base collection #######################= 0.941 +- 0.2 % + String, iterator on coll ########################= 0.993 +- 0.2 % + 
String, serial stream ######################### 0.995 +- 0.1 % + String Stepper (seq only) ######################### 0.994 +- 0.4 % + +slow trig on hashCode +10000 elements +collection.immutable.List + String, base collection ########################= 0.987 +- 0.2 % + String, iterator on coll ######################### 0.994 +- 0.2 % + String, serial stream ######################### 0.996 +- 0.1 % + String Stepper (seq only) ######################### 0.996 +- 0.2 % + +slow trig on hashCode +10000 elements +collection.mutable.HashSet + String, base collection ####################### 0.916 +- 0.4 % + String, iterator on coll ########################- 0.977 +- 0.4 % + String, serial stream ########################- 0.979 +- 0.1 % + String Stepper (can par) ########################= 0.983 +- 0.4 % + +slow trig on hashCode +10000 elements +collection.mutable.Queue + String, base collection ######################## 0.957 +- 0.3 % + String, iterator on coll ########################= 0.992 +- 0.3 % + String, serial stream ########################- 0.977 +- 0.2 % + String Stepper (seq only) ########################- 0.971 +- 0.2 % + +slow trig on hashCode +10000 elements +collection.mutable.PriorityQueue + String, base collection #################= 0.711 +- 0.3 % + String, iterator on coll ########################= 0.988 +- 0.4 % + String, serial stream ########################= 0.989 +- 0.1 % + String Stepper (seq only) ########################= 0.990 +- 0.4 % + +slow trig on hashCode +10000 elements +collection.immutable.Queue + String, base collection ########################- 0.972 +- 0.3 % + String, iterator on coll ########################= 0.993 +- 0.3 % + String, serial stream ########################= 0.987 +- 0.7 % + String Stepper (seq only) ########################= 0.990 +- 0.2 % + +slow trig on hashCode +10000 elements +collection.immutable.Stream + String, base collection ######################## 0.954 +- 0.2 % + String, iterator on coll 
########################- 0.975 +- 0.3 % + String, serial stream ########################= 0.992 +- 0.1 % + String Stepper (seq only) ########################= 0.993 +- 0.3 % + +slow trig on hashCode +10000 elements +collection.immutable.TreeSet + String, base collection ################ 0.635 +- 0.4 % + String, iterator on coll ########################= 0.983 +- 0.3 % + String, serial stream ########################= 0.981 +- 0.2 % + String Stepper (seq only) ########################= 0.983 +- 0.4 % + +slow trig on hashCode +10000 elements +collection.immutable.Vector + String, base collection ########################= 0.984 +- 0.2 % + String, iterator on coll ########################= 0.991 +- 0.2 % + String, serial stream ######################### 0.995 +- 0.1 % + String Stepper (can par) ######################### 0.995 +- 0.2 % + +slow trig on hashCode +10000 elements +collection.mutable.WrappedArray + String, base collection ########################= 0.986 +- 0.2 % + String, iterator on coll ########################= 0.992 +- 0.2 % + String, serial stream ######################### 0.995 +- 0.1 % + String Stepper (can par) ######################### 0.996 +- 0.2 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +map/filter/take trio +10 elements +collection.mutable.ArrayBuffer + Int, base collection # 0.043 +- 0.7 % + Int, iterator on coll ###### 0.237 +- 14.9 % + Int, serial stream #= 0.067 +- 3.1 % + +map/filter/take trio +10 elements +Array + Int, base collection ######################### 1.000 +- 0.9 % + Int, iterator on coll ######- 0.257 +- 25.2 % + Int, serial stream ## 0.076 +- 2.2 % + +map/filter/take trio +10 elements +collection.mutable.ArraySeq + Int, base collection = 0.031 +- 1.2 % + Int, iterator on coll #####- 0.213 +- 20.9 % + Int, serial stream #= 0.065 +- 7.1 % + +map/filter/take trio +10 elements +collection.mutable.ArrayStack + Int, base collection = 0.023 +- 1.6 % + Int, iterator on coll ######- 0.259 +- 
15.7 % + Int, serial stream #= 0.066 +- 1.0 % + +map/filter/take trio +10 elements +java.util.ArrayList + Int, serial stream #- 0.051 +- 1.8 % + +map/filter/take trio +10 elements +java.util.LinkedList + Int, serial stream #- 0.052 +- 4.6 % + +map/filter/take trio +10 elements +collection.immutable.List + Int, base collection #= 0.063 +- 2.1 % + Int, iterator on coll ######= 0.267 +- 2.3 % + Int, serial stream #- 0.059 +- 1.9 % + +map/filter/take trio +10 elements +collection.mutable.Queue + Int, base collection - 0.013 +- 4.4 % + Int, iterator on coll ###- 0.130 +- 3.8 % + Int, serial stream = 0.027 +- 2.4 % + +map/filter/take trio +10 elements +collection.immutable.Queue + Int, base collection = 0.022 +- 0.8 % + Int, iterator on coll #####= 0.222 +- 13.2 % + Int, serial stream #- 0.050 +- 2.0 % + +map/filter/take trio +10 elements +collection.immutable.Stream + Int, base collection - 0.013 +- 3.4 % + Int, iterator on coll # 0.039 +- 5.1 % + Int, serial stream #- 0.058 +- 2.7 % + +map/filter/take trio +10 elements +collection.immutable.Vector + Int, base collection #- 0.058 +- 1.1 % + Int, iterator on coll ######- 0.254 +- 2.1 % + Int, serial stream #= 0.066 +- 2.4 % + +map/filter/take trio +10 elements +collection.mutable.WrappedArray + Int, base collection # 0.045 +- 0.6 % + Int, iterator on coll #####- 0.209 +- 14.9 % + Int, serial stream #= 0.063 +- 1.3 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +map/filter/take trio +10000 elements +collection.mutable.ArrayBuffer + Int, base collection 0.000 +- 3.5 % + Int, iterator on coll #= 0.067 +- 13.3 % + Int, serial stream #- 0.056 +- 1.9 % + +map/filter/take trio +10000 elements +Array + Int, base collection ######################### 1.000 +- 0.7 % + Int, iterator on coll ## 0.084 +- 10.5 % + Int, serial stream #= 0.063 +- 1.0 % + +map/filter/take trio +10000 elements +collection.mutable.ArraySeq + Int, base collection 0.000 +- 2.3 % + Int, iterator on coll #= 0.062 +- 0.4 % + 
Int, serial stream #- 0.056 +- 1.1 % + +map/filter/take trio +10000 elements +collection.mutable.ArrayStack + Int, base collection 0.000 +- 8.5 % + Int, iterator on coll ## 0.080 +- 17.5 % + Int, serial stream #- 0.053 +- 2.6 % + +map/filter/take trio +10000 elements +java.util.ArrayList + Int, serial stream # 0.042 +- 4.4 % + +map/filter/take trio +10000 elements +java.util.LinkedList + Int, serial stream # 0.041 +- 5.9 % + +map/filter/take trio +10000 elements +collection.immutable.List + Int, base collection 0.000 +- 1.0 % + Int, iterator on coll #= 0.072 +- 0.8 % + Int, serial stream #- 0.055 +- 10.6 % + +map/filter/take trio +10000 elements +collection.mutable.Queue + Int, base collection 0.000 +- 3.2 % + Int, iterator on coll #- 0.053 +- 28.4 % + Int, serial stream - 0.018 +- 1.7 % + +map/filter/take trio +10000 elements +collection.immutable.Queue + Int, base collection 0.000 +- 1.4 % + Int, iterator on coll ## 0.076 +- 10.0 % + Int, serial stream # 0.037 +- 1.2 % + +map/filter/take trio +10000 elements +collection.immutable.Stream + Int, base collection - 0.009 +- 6.4 % + Int, iterator on coll = 0.026 +- 4.1 % + Int, serial stream #- 0.056 +- 0.8 % + +map/filter/take trio +10000 elements +collection.immutable.Vector + Int, base collection 0.000 +- 2.0 % + Int, iterator on coll ## 0.082 +- 15.8 % + Int, serial stream #- 0.055 +- 3.9 % + +map/filter/take trio +10000 elements +collection.mutable.WrappedArray + Int, base collection 0.000 +- 8.8 % + Int, iterator on coll #- 0.053 +- 1.3 % + Int, serial stream #- 0.049 +- 0.7 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +map/filter/take trio +10 elements +collection.mutable.ArrayBuffer + String, base collection ############ 0.485 +- 0.8 % + String, iterator on coll #############- 0.531 +- 1.1 % + String, serial stream ############= 0.502 +- 3.1 % + +map/filter/take trio +10 elements +Array + String, base collection ######################### 1.000 +- 0.6 % + String, iterator 
on coll #############- 0.527 +- 1.7 % + String, serial stream ############# 0.516 +- 1.0 % + +map/filter/take trio +10 elements +collection.mutable.ArraySeq + String, base collection ##########- 0.414 +- 0.9 % + String, iterator on coll ############= 0.509 +- 3.8 % + String, serial stream #############= 0.545 +- 12.0 % + +map/filter/take trio +10 elements +collection.mutable.ArrayStack + String, base collection ########- 0.335 +- 1.8 % + String, iterator on coll #############= 0.551 +- 10.2 % + String, serial stream #############- 0.531 +- 10.7 % + +map/filter/take trio +10 elements +java.util.ArrayList + String, serial stream ############ 0.477 +- 2.2 % + +map/filter/take trio +10 elements +java.util.LinkedList + String, serial stream ############- 0.487 +- 5.2 % + +map/filter/take trio +10 elements +collection.immutable.List + String, base collection #############= 0.547 +- 5.1 % + String, iterator on coll ##############= 0.589 +- 1.4 % + String, serial stream #############- 0.528 +- 13.5 % + +map/filter/take trio +10 elements +collection.mutable.Queue + String, base collection ###### 0.235 +- 3.4 % + String, iterator on coll #############- 0.531 +- 1.2 % + String, serial stream #########= 0.389 +- 9.1 % + +map/filter/take trio +10 elements +collection.immutable.Queue + String, base collection ########- 0.336 +- 1.1 % + String, iterator on coll ##############= 0.589 +- 1.3 % + String, serial stream ###########- 0.458 +- 2.1 % + +map/filter/take trio +10 elements +collection.immutable.Stream + String, base collection ######- 0.250 +- 2.5 % + String, iterator on coll ##########- 0.415 +- 2.0 % + String, serial stream #############= 0.546 +- 12.1 % + +map/filter/take trio +10 elements +collection.immutable.Vector + String, base collection ############= 0.512 +- 6.6 % + String, iterator on coll ############# 0.525 +- 1.2 % + String, serial stream ############- 0.496 +- 1.0 % + +map/filter/take trio +10 elements +collection.mutable.WrappedArray + String, base 
collection ###########= 0.472 +- 4.5 % + String, iterator on coll #############- 0.527 +- 2.6 % + String, serial stream ############- 0.500 +- 1.0 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +map/filter/take trio +10000 elements +collection.mutable.ArrayBuffer + String, base collection 0.000 +- 9.2 % + String, iterator on coll ############## 0.555 +- 1.4 % + String, serial stream #############- 0.530 +- 1.4 % + +map/filter/take trio +10000 elements +Array + String, base collection ######################### 1.000 +- 8.4 % + String, iterator on coll #############= 0.542 +- 1.1 % + String, serial stream #############= 0.549 +- 3.1 % + +map/filter/take trio +10000 elements +collection.mutable.ArraySeq + String, base collection 0.000 +- 6.2 % + String, iterator on coll #############= 0.550 +- 2.2 % + String, serial stream ##############- 0.571 +- 12.9 % + +map/filter/take trio +10000 elements +collection.mutable.ArrayStack + String, base collection 0.000 +- 6.9 % + String, iterator on coll ###############= 0.624 +- 7.3 % + String, serial stream ############# 0.516 +- 1.8 % + +map/filter/take trio +10000 elements +java.util.ArrayList + String, serial stream ############# 0.522 +- 4.9 % + +map/filter/take trio +10000 elements +java.util.LinkedList + String, serial stream ############= 0.511 +- 1.6 % + +map/filter/take trio +10000 elements +collection.immutable.List + String, base collection 0.001 +- 9.9 % + String, iterator on coll ############### 0.600 +- 2.1 % + String, serial stream ############## 0.554 +- 5.2 % + +map/filter/take trio +10000 elements +collection.mutable.Queue + String, base collection 0.000 +- 7.9 % + String, iterator on coll ############## 0.562 +- 1.2 % + String, serial stream ########## 0.396 +- 9.2 % + +map/filter/take trio +10000 elements +collection.immutable.Queue + String, base collection 0.000 +- 3.8 % + String, iterator on coll ##############= 0.590 +- 2.3 % + String, serial stream #############= 0.546 
+- 13.1 % + +map/filter/take trio +10000 elements +collection.immutable.Stream + String, base collection ######= 0.273 +- 2.0 % + String, iterator on coll ###########- 0.458 +- 3.7 % + String, serial stream ############### 0.606 +- 11.6 % + +map/filter/take trio +10000 elements +collection.immutable.Vector + String, base collection 0.001 +- 8.4 % + String, iterator on coll ############# 0.521 +- 1.4 % + String, serial stream #############= 0.540 +- 4.6 % + +map/filter/take trio +10000 elements +collection.mutable.WrappedArray + String, base collection 0.001 +- 1.7 % + String, iterator on coll #############= 0.544 +- 2.2 % + String, serial stream #############= 0.543 +- 3.3 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast sum of lengths +10 elements +collection.mutable.ArrayBuffer + String, base collection ########- 0.332 +- 0.8 % + String, iterator on coll ######## 0.316 +- 2.1 % + String, serial stream ######## 0.318 +- 1.3 % + String Stepper (can par) ##############= 0.582 +- 0.4 % + +fast sum of lengths +10 elements +Array + String, base collection ######################### 1.000 +- 0.4 % + String, iterator on coll #######- 0.287 +- 15.1 % + String, serial stream ######## 0.325 +- 1.5 % + String Stepper (can par) ################# 0.686 +- 0.4 % + +fast sum of lengths +10 elements +collection.mutable.ArraySeq + String, base collection ##################- 0.731 +- 1.2 % + String, iterator on coll ######## 0.319 +- 0.5 % + String, serial stream ######## 0.319 +- 0.6 % + String Stepper (can par) ##############= 0.582 +- 0.3 % + +fast sum of lengths +10 elements +collection.mutable.ArrayStack + String, base collection ##################- 0.729 +- 1.5 % + String, iterator on coll ########= 0.344 +- 1.0 % + String, serial stream ####### 0.282 +- 3.0 % + String Stepper (seq only) ############# 0.519 +- 9.1 % + +fast sum of lengths +10 elements +collection.immutable.ListSet + String, base collection #####- 0.219 +- 0.4 % + String, 
iterator on coll ####### 0.285 +- 0.9 % + String, serial stream ####- 0.179 +- 5.8 % + String Stepper (seq only) ########## 0.394 +- 1.2 % + +fast sum of lengths +10 elements +collection.immutable.HashSet + String, base collection ######= 0.266 +- 11.3 % + String, iterator on coll ####### 0.277 +- 11.4 % + String, serial stream #### 0.161 +- 1.3 % + String Stepper (can par) #####= 0.221 +- 3.2 % + +fast sum of lengths +10 elements +java.util.ArrayList + String, serial stream ########- 0.337 +- 0.4 % + +fast sum of lengths +10 elements +java.util.LinkedList + String, serial stream ######## 0.314 +- 1.6 % + +fast sum of lengths +10 elements +collection.mutable.LinkedHashSet + String, base collection ################= 0.668 +- 1.1 % + String, iterator on coll #########- 0.370 +- 0.9 % + String, serial stream #######- 0.292 +- 0.8 % + String Stepper (seq only) ############- 0.495 +- 0.9 % + +fast sum of lengths +10 elements +collection.immutable.List + String, base collection ###############- 0.613 +- 1.3 % + String, iterator on coll ########= 0.343 +- 0.4 % + String, serial stream #####- 0.214 +- 2.0 % + String Stepper (seq only) ########- 0.339 +- 2.4 % + +fast sum of lengths +10 elements +collection.mutable.HashSet + String, base collection ###########- 0.458 +- 4.6 % + String, iterator on coll ######- 0.248 +- 0.7 % + String, serial stream ##### 0.199 +- 0.9 % + String Stepper (can par) ##########= 0.432 +- 1.4 % + +fast sum of lengths +10 elements +collection.mutable.Queue + String, base collection ## 0.086 +- 0.8 % + String, iterator on coll ###### 0.245 +- 0.4 % + String, serial stream #= 0.069 +- 1.0 % + String Stepper (seq only) ## 0.084 +- 1.8 % + +fast sum of lengths +10 elements +collection.mutable.PriorityQueue + String, base collection ######## 0.314 +- 0.3 % + String, iterator on coll ######## 0.322 +- 0.5 % + String, serial stream ####### 0.285 +- 1.3 % + String Stepper (seq only) ############= 0.505 +- 0.8 % + +fast sum of lengths +10 elements 
+collection.immutable.Queue + String, base collection #####= 0.232 +- 0.4 % + String, iterator on coll ########- 0.330 +- 0.7 % + String, serial stream #### 0.166 +- 0.9 % + String Stepper (seq only) #####= 0.231 +- 1.1 % + +fast sum of lengths +10 elements +collection.immutable.Stream + String, base collection ###############= 0.629 +- 0.7 % + String, iterator on coll ## 0.087 +- 4.7 % + String, serial stream #####- 0.219 +- 1.6 % + String Stepper (seq only) ########- 0.331 +- 1.0 % + +fast sum of lengths +10 elements +collection.immutable.TreeSet + String, base collection ########- 0.338 +- 0.4 % + String, iterator on coll #####= 0.224 +- 0.7 % + String, serial stream ####- 0.173 +- 1.6 % + String Stepper (seq only) ######- 0.255 +- 3.4 % + +fast sum of lengths +10 elements +collection.immutable.Vector + String, base collection ####### 0.276 +- 0.5 % + String, iterator on coll #######= 0.307 +- 0.5 % + String, serial stream ###### 0.236 +- 1.2 % + String Stepper (can par) ##########- 0.418 +- 0.6 % + +fast sum of lengths +10 elements +collection.mutable.WrappedArray + String, base collection ########= 0.344 +- 0.3 % + String, iterator on coll ########- 0.329 +- 0.3 % + String, serial stream ######## 0.317 +- 0.5 % + String Stepper (can par) ##############= 0.583 +- 1.4 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast sum of lengths +10000 elements +collection.mutable.ArrayBuffer + String, base collection ########= 0.340 +- 0.9 % + String, iterator on coll ########- 0.334 +- 0.3 % + String, serial stream #################- 0.690 +- 33.6 % + String Stepper (can par) ############# 0.526 +- 2.1 % + +fast sum of lengths +10000 elements +Array + String, base collection ######################### 1.000 +- 0.6 % + String, iterator on coll ########- 0.328 +- 0.9 % + String, serial stream ################## 0.726 +- 27.9 % + String Stepper (can par) ################ 0.640 +- 4.6 % + +fast sum of lengths +10000 elements 
+collection.mutable.ArraySeq + String, base collection ####################= 0.830 +- 0.7 % + String, iterator on coll ########- 0.332 +- 0.4 % + String, serial stream ###################- 0.774 +- 24.4 % + String Stepper (can par) ############# 0.523 +- 1.9 % + +fast sum of lengths +10000 elements +collection.mutable.ArrayStack + String, base collection ################- 0.654 +- 6.7 % + String, iterator on coll ######### 0.362 +- 0.3 % + String, serial stream ################= 0.666 +- 25.4 % + String Stepper (seq only) ############# 0.515 +- 9.1 % + +fast sum of lengths +10000 elements +collection.immutable.ListSet + String, base collection #########- 0.379 +- 30.0 % + String, iterator on coll ######## 0.325 +- 16.5 % + String, serial stream ########### 0.436 +- 39.9 % + String Stepper (seq only) #########- 0.372 +- 6.0 % + +fast sum of lengths +10000 elements +collection.immutable.HashSet + String, base collection ### 0.125 +- 3.1 % + String, iterator on coll ### 0.123 +- 1.0 % + String, serial stream ### 0.118 +- 0.5 % + String Stepper (can par) ###- 0.135 +- 0.6 % + +fast sum of lengths +10000 elements +java.util.ArrayList + String, serial stream ####################= 0.821 +- 10.1 % + +fast sum of lengths +10000 elements +java.util.LinkedList + String, serial stream ################### 0.755 +- 3.9 % + +fast sum of lengths +10000 elements +collection.mutable.LinkedHashSet + String, base collection #################- 0.691 +- 2.3 % + String, iterator on coll ##########- 0.412 +- 11.4 % + String, serial stream #############= 0.551 +- 30.5 % + String Stepper (seq only) ########## 0.402 +- 3.8 % + +fast sum of lengths +10000 elements +collection.immutable.List + String, base collection #################= 0.713 +- 1.4 % + String, iterator on coll #########- 0.372 +- 11.2 % + String, serial stream ###########- 0.459 +- 0.6 % + String Stepper (seq only) #######= 0.304 +- 1.5 % + +fast sum of lengths +10000 elements +collection.mutable.HashSet + String, base 
collection ###- 0.127 +- 0.2 % + String, iterator on coll ##= 0.113 +- 0.3 % + String, serial stream ## 0.086 +- 0.4 % + String Stepper (can par) ###- 0.133 +- 0.9 % + +fast sum of lengths +10000 elements +collection.mutable.Queue + String, base collection #= 0.072 +- 1.4 % + String, iterator on coll #########- 0.367 +- 26.7 % + String, serial stream ### 0.115 +- 1.7 % + String Stepper (seq only) ## 0.080 +- 1.2 % + +fast sum of lengths +10000 elements +collection.mutable.PriorityQueue + String, base collection ########- 0.338 +- 30.0 % + String, iterator on coll ######## 0.319 +- 30.9 % + String, serial stream #### 0.166 +- 16.9 % + String Stepper (seq only) ###### 0.243 +- 5.6 % + +fast sum of lengths +10000 elements +collection.immutable.Queue + String, base collection #########= 0.381 +- 32.4 % + String, iterator on coll ########## 0.399 +- 11.0 % + String, serial stream ######- 0.254 +- 2.7 % + String Stepper (seq only) #####= 0.220 +- 0.8 % + +fast sum of lengths +10000 elements +collection.immutable.Stream + String, base collection #######- 0.287 +- 4.8 % + String, iterator on coll ##- 0.095 +- 7.2 % + String, serial stream #####= 0.220 +- 8.0 % + String Stepper (seq only) ######- 0.258 +- 1.7 % + +fast sum of lengths +10000 elements +collection.immutable.TreeSet + String, base collection ####= 0.180 +- 0.7 % + String, iterator on coll ##### 0.194 +- 8.2 % + String, serial stream ##= 0.110 +- 0.9 % + String Stepper (seq only) ####= 0.190 +- 2.3 % + +fast sum of lengths +10000 elements +collection.immutable.Vector + String, base collection ##########- 0.412 +- 15.0 % + String, iterator on coll ######### 0.355 +- 22.0 % + String, serial stream ###########= 0.460 +- 16.9 % + String Stepper (can par) ###########- 0.452 +- 3.0 % + +fast sum of lengths +10000 elements +collection.mutable.WrappedArray + String, base collection ########= 0.353 +- 0.3 % + String, iterator on coll ########- 0.340 +- 0.8 % + String, serial stream ################# 0.683 +- 32.7 % + 
String Stepper (can par) #############- 0.533 +- 2.6 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +parallel filter/map/sum trio +10 elements +collection.mutable.ArrayBuffer + Int, par collection ########################- 0.968 +- 3.4 % + Int, parallel stream ##############################~~# 4.299 +- 2.7 % + +parallel filter/map/sum trio +10 elements +Array + Int, par collection ######################### 1.000 +- 2.1 % + Int, parallel stream ##############################~~# 4.361 +- 1.2 % + +parallel filter/map/sum trio +10 elements +collection.mutable.ArraySeq + Int, par collection #########################- 1.014 +- 1.2 % + Int, parallel stream ##############################~~# 4.362 +- 2.0 % + +parallel filter/map/sum trio +10 elements +collection.immutable.HashSet + Int, par collection ####################= 0.830 +- 2.2 % + Int, parallel stream ##############################~~# 3.569 +- 1.5 % + +parallel filter/map/sum trio +10 elements +java.util.ArrayList + Int, parallel stream ##############################~~# 4.389 +- 1.1 % + +parallel filter/map/sum trio +10 elements +java.util.LinkedList + Int, parallel stream ##############################~~# 4.299 +- 2.4 % + +parallel filter/map/sum trio +10 elements +collection.mutable.HashSet + Int, par collection ########################### 1.079 +- 1.1 % + Int, parallel stream ##############################~~# 5.277 +- 2.3 % + +parallel filter/map/sum trio +10 elements +collection.immutable.Vector + Int, par collection #####################- 0.847 +- 1.7 % + Int, parallel stream ##############################~~# 4.289 +- 1.2 % + +parallel filter/map/sum trio +10 elements +collection.mutable.WrappedArray + Int, par collection ######################### 1.003 +- 1.4 % + Int, parallel stream ##############################~~# 4.399 +- 0.6 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +parallel filter/map/sum trio +10000 elements 
+collection.mutable.ArrayBuffer + Int, par collection ##############################~~# 1.469 +- 1.3 % + Int, parallel stream ##############################~~# 6.310 +- 10.2 % + +parallel filter/map/sum trio +10000 elements +Array + Int, par collection ######################### 1.000 +- 1.0 % + Int, parallel stream ##############################~~# 11.043 +- 1.0 % + +parallel filter/map/sum trio +10000 elements +collection.mutable.ArraySeq + Int, par collection ##############################~~# 1.469 +- 1.5 % + Int, parallel stream ##############################~~# 6.301 +- 4.2 % + +parallel filter/map/sum trio +10000 elements +collection.immutable.HashSet + Int, par collection ################= 0.664 +- 0.8 % + Int, parallel stream ##############################~~# 3.468 +- 1.0 % + +parallel filter/map/sum trio +10000 elements +java.util.ArrayList + Int, parallel stream ##############################~~# 9.264 +- 3.0 % + +parallel filter/map/sum trio +10000 elements +java.util.LinkedList + Int, parallel stream ##############################~~# 4.247 +- 10.6 % + +parallel filter/map/sum trio +10000 elements +collection.mutable.HashSet + Int, par collection ################## 0.727 +- 1.2 % + Int, parallel stream ##############################~~# 3.579 +- 7.4 % + +parallel filter/map/sum trio +10000 elements +collection.immutable.Vector + Int, par collection ##############################- 1.216 +- 2.2 % + Int, parallel stream ##############################~~# 5.987 +- 3.6 % + +parallel filter/map/sum trio +10000 elements +collection.mutable.WrappedArray + Int, par collection ########################### 1.074 +- 1.1 % + Int, parallel stream ##############################~~# 5.150 +- 0.7 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +parallel filter/map/sum trio +10 elements +collection.mutable.ArrayBuffer + String, par collection #########################- 1.014 +- 0.8 % + String, parallel stream ##############################~~# 
4.219 +- 3.2 % + +parallel filter/map/sum trio +10 elements +Array + String, par collection ######################### 1.000 +- 2.8 % + String, parallel stream ##############################~~# 4.299 +- 1.1 % + +parallel filter/map/sum trio +10 elements +collection.mutable.ArraySeq + String, par collection ######################### 1.002 +- 1.6 % + String, parallel stream ##############################~~# 4.326 +- 1.5 % + +parallel filter/map/sum trio +10 elements +collection.immutable.HashSet + String, par collection #####################= 0.861 +- 1.1 % + String, parallel stream ##############################~~# 3.656 +- 1.1 % + +parallel filter/map/sum trio +10 elements +java.util.ArrayList + String, parallel stream ##############################~~# 4.173 +- 3.2 % + +parallel filter/map/sum trio +10 elements +java.util.LinkedList + String, parallel stream ##############################~~# 4.073 +- 3.0 % + +parallel filter/map/sum trio +10 elements +collection.mutable.HashSet + String, par collection ######################## 0.964 +- 1.3 % + String, parallel stream ##############################~~# 4.613 +- 1.2 % + +parallel filter/map/sum trio +10 elements +collection.immutable.Vector + String, par collection #####################- 0.857 +- 1.2 % + String, parallel stream ##############################~~# 4.221 +- 1.6 % + +parallel filter/map/sum trio +10 elements +collection.mutable.WrappedArray + String, par collection ########################= 0.982 +- 2.7 % + String, parallel stream ##############################~~# 4.370 +- 1.6 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +parallel filter/map/sum trio +10000 elements +collection.mutable.ArrayBuffer + String, par collection ######################### 0.994 +- 1.4 % + String, parallel stream ##############################~~# 5.317 +- 7.5 % + +parallel filter/map/sum trio +10000 elements +Array + String, par collection ######################### 1.000 +- 1.3 % + String, 
parallel stream ##############################~~# 6.147 +- 1.0 % + +parallel filter/map/sum trio +10000 elements +collection.mutable.ArraySeq + String, par collection ######################### 1.004 +- 1.5 % + String, parallel stream ##############################~~# 5.464 +- 5.9 % + +parallel filter/map/sum trio +10000 elements +collection.immutable.HashSet + String, par collection ########### 0.434 +- 2.2 % + String, parallel stream ##############################~~# 2.221 +- 6.4 % + +parallel filter/map/sum trio +10000 elements +java.util.ArrayList + String, parallel stream ##############################~~# 6.477 +- 0.4 % + +parallel filter/map/sum trio +10000 elements +java.util.LinkedList + String, parallel stream ##############################~~# 2.252 +- 10.3 % + +parallel filter/map/sum trio +10000 elements +collection.mutable.HashSet + String, par collection ###########= 0.470 +- 0.9 % + String, parallel stream ##############################~~# 2.477 +- 8.0 % + +parallel filter/map/sum trio +10000 elements +collection.immutable.Vector + String, par collection ###################= 0.786 +- 0.7 % + String, parallel stream ##############################~~# 5.795 +- 1.1 % + +parallel filter/map/sum trio +10000 elements +collection.mutable.WrappedArray + String, par collection ########################= 0.985 +- 1.7 % + String, parallel stream ##############################~~# 5.575 +- 5.4 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow parallel trig on hashCode +10 elements +collection.mutable.ArrayBuffer + String, par collection ######################### 0.999 +- 1.3 % + String, parallel stream ##############################~~# 3.106 +- 0.7 % + +slow parallel trig on hashCode +10 elements +Array + String, par collection ######################### 1.000 +- 1.1 % + String, parallel stream ##############################~~# 3.144 +- 0.8 % + +slow parallel trig on hashCode +10 elements +collection.mutable.ArraySeq + String, 
par collection #########################- 1.016 +- 1.1 % + String, parallel stream ##############################~~# 3.117 +- 1.0 % + +slow parallel trig on hashCode +10 elements +collection.immutable.HashSet + String, par collection ################ 0.638 +- 1.3 % + String, parallel stream ##############################~~# 2.879 +- 0.6 % + +slow parallel trig on hashCode +10 elements +java.util.ArrayList + String, parallel stream ##############################~~# 3.144 +- 0.7 % + +slow parallel trig on hashCode +10 elements +java.util.LinkedList + String, parallel stream ##############################~~# 3.126 +- 1.0 % + +slow parallel trig on hashCode +10 elements +collection.mutable.HashSet + String, par collection #######################- 0.937 +- 1.0 % + String, parallel stream ##############################~~# 2.938 +- 0.5 % + +slow parallel trig on hashCode +10 elements +collection.immutable.Vector + String, par collection ##################= 0.741 +- 1.5 % + String, parallel stream ##############################~~# 3.139 +- 1.0 % + +slow parallel trig on hashCode +10 elements +collection.mutable.WrappedArray + String, par collection ######################### 1.000 +- 1.0 % + String, parallel stream ##############################~~# 3.104 +- 1.1 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow parallel trig on hashCode +10000 elements +collection.mutable.ArrayBuffer + String, par collection ########################- 0.978 +- 2.1 % + String, parallel stream ##############################- 1.209 +- 0.9 % + +slow parallel trig on hashCode +10000 elements +Array + String, par collection ######################### 1.000 +- 0.9 % + String, parallel stream ##############################- 1.211 +- 0.9 % + +slow parallel trig on hashCode +10000 elements +collection.mutable.ArraySeq + String, par collection ########################- 0.975 +- 2.2 % + String, parallel stream ##############################- 1.215 +- 0.7 % + +slow 
parallel trig on hashCode +10000 elements +collection.immutable.HashSet + String, par collection ####################- 0.809 +- 1.9 % + String, parallel stream ############################- 1.129 +- 0.5 % + +slow parallel trig on hashCode +10000 elements +java.util.ArrayList + String, parallel stream ############################## 1.200 +- 2.0 % + +slow parallel trig on hashCode +10000 elements +java.util.LinkedList + String, parallel stream ############################ 1.121 +- 0.6 % + +slow parallel trig on hashCode +10000 elements +collection.mutable.HashSet + String, par collection ##################- 0.734 +- 1.8 % + String, parallel stream #############################- 1.176 +- 0.4 % + +slow parallel trig on hashCode +10000 elements +collection.immutable.Vector + String, par collection #################- 0.698 +- 1.3 % + String, parallel stream ##############################- 1.216 +- 0.6 % + +slow parallel trig on hashCode +10000 elements +collection.mutable.WrappedArray + String, par collection ########################- 0.969 +- 2.4 % + String, parallel stream ##############################- 1.213 +- 0.7 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast parallel sum of lengths +10 elements +collection.mutable.ArrayBuffer + String, par collection ########################## 1.036 +- 6.6 % + String, parallel stream ##############################~~# 1.759 +- 1.2 % + +fast parallel sum of lengths +10 elements +Array + String, par collection ######################### 1.000 +- 6.4 % + String, parallel stream ##############################~~# 1.740 +- 1.8 % + +fast parallel sum of lengths +10 elements +collection.mutable.ArraySeq + String, par collection ###########################- 1.099 +- 12.3 % + String, parallel stream ##############################~~# 1.748 +- 2.1 % + +fast parallel sum of lengths +10 elements +collection.immutable.HashSet + String, par collection #########################- 1.019 +- 2.9 % + String, 
parallel stream ##############################~~# 1.470 +- 1.1 % + +fast parallel sum of lengths +10 elements +java.util.ArrayList + String, parallel stream ##############################~~# 1.729 +- 1.0 % + +fast parallel sum of lengths +10 elements +java.util.LinkedList + String, parallel stream ##############################~~# 1.690 +- 1.4 % + +fast parallel sum of lengths +10 elements +collection.mutable.HashSet + String, par collection ###########################- 1.092 +- 3.1 % + String, parallel stream ##############################~~# 1.902 +- 3.4 % + +fast parallel sum of lengths +10 elements +collection.immutable.Vector + String, par collection ######################= 0.908 +- 3.1 % + String, parallel stream ##############################~~# 1.761 +- 1.4 % + +fast parallel sum of lengths +10 elements +collection.mutable.WrappedArray + String, par collection #########################= 1.032 +- 10.0 % + String, parallel stream ##############################~~# 1.751 +- 1.7 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast parallel sum of lengths +10000 elements +collection.mutable.ArrayBuffer + String, par collection ######################### 1.002 +- 1.0 % + String, parallel stream ############################ 1.122 +- 0.9 % + +fast parallel sum of lengths +10000 elements +Array + String, par collection ######################### 1.000 +- 0.9 % + String, parallel stream ################################ 1.284 +- 1.7 % + +fast parallel sum of lengths +10000 elements +collection.mutable.ArraySeq + String, par collection ########################= 0.992 +- 1.6 % + String, parallel stream ################################- 1.287 +- 6.4 % + +fast parallel sum of lengths +10000 elements +collection.immutable.HashSet + String, par collection ##########- 0.408 +- 0.9 % + String, parallel stream ################- 0.648 +- 0.6 % + +fast parallel sum of lengths +10000 elements +java.util.ArrayList + String, parallel stream 
##############################~~# 1.432 +- 13.1 % + +fast parallel sum of lengths +10000 elements +java.util.LinkedList + String, parallel stream #############= 0.550 +- 12.9 % + +fast parallel sum of lengths +10000 elements +collection.mutable.HashSet + String, par collection ##########- 0.411 +- 0.8 % + String, parallel stream ##############= 0.583 +- 0.6 % + +fast parallel sum of lengths +10000 elements +collection.immutable.Vector + String, par collection ################### 0.759 +- 1.2 % + String, parallel stream ##############################= 1.221 +- 0.8 % + +fast parallel sum of lengths +10000 elements +collection.mutable.WrappedArray + String, par collection ######################### 0.999 +- 1.0 % + String, parallel stream ################################- 1.294 +- 7.0 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast parallel summation +10 elements +collection.mutable.ArrayBuffer + Int, par collection ######################- 0.890 +- 6.7 % + Int, parallel stream ##############################~~# 1.483 +- 1.2 % + +fast parallel summation +10 elements +Array + Int, par collection ######################### 1.000 +- 6.0 % + Int, parallel stream ##############################~~# 1.467 +- 1.2 % + +fast parallel summation +10 elements +collection.mutable.ArraySeq + Int, par collection ######################= 0.904 +- 6.7 % + Int, parallel stream ##############################~~# 1.471 +- 1.4 % + +fast parallel summation +10 elements +collection.immutable.HashSet + Int, par collection ##################- 0.737 +- 5.1 % + Int, parallel stream ###############################- 1.257 +- 1.4 % + +fast parallel summation +10 elements +java.util.ArrayList + Int, parallel stream ##############################~~# 1.497 +- 1.0 % + +fast parallel summation +10 elements +java.util.LinkedList + Int, parallel stream ##############################~~# 1.463 +- 1.4 % + +fast parallel summation +10 elements +collection.mutable.HashSet + 
Int, par collection ##########################- 1.060 +- 1.0 % + Int, parallel stream ##############################~~# 1.800 +- 1.4 % + +fast parallel summation +10 elements +collection.immutable.Vector + Int, par collection ####################= 0.829 +- 0.7 % + Int, parallel stream ##############################~~# 1.459 +- 2.1 % + +fast parallel summation +10 elements +collection.mutable.WrappedArray + Int, par collection #########################- 1.008 +- 7.5 % + Int, parallel stream ##############################~~# 1.489 +- 0.8 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast parallel summation +10000 elements +collection.mutable.ArrayBuffer + Int, par collection ##############################~~# 2.489 +- 0.8 % + Int, parallel stream ##############################~~# 3.404 +- 1.4 % + +fast parallel summation +10000 elements +Array + Int, par collection ######################### 1.000 +- 9.6 % + Int, parallel stream ##############################~~# 6.862 +- 4.9 % + +fast parallel summation +10000 elements +collection.mutable.ArraySeq + Int, par collection ##############################~~# 2.482 +- 0.8 % + Int, parallel stream ##############################~~# 3.686 +- 0.8 % + +fast parallel summation +10000 elements +collection.immutable.HashSet + Int, par collection ################################- 1.300 +- 1.0 % + Int, parallel stream ##############################~~# 1.898 +- 0.5 % + +fast parallel summation +10000 elements +java.util.ArrayList + Int, parallel stream ##############################~~# 5.781 +- 16.3 % + +fast parallel summation +10000 elements +java.util.LinkedList + Int, parallel stream ##############################~~# 2.435 +- 8.6 % + +fast parallel summation +10000 elements +collection.mutable.HashSet + Int, par collection ###############################- 1.259 +- 0.9 % + Int, parallel stream ##############################~~# 1.749 +- 0.4 % + +fast parallel summation +10000 elements 
+collection.immutable.Vector + Int, par collection ##############################~~# 1.993 +- 1.1 % + Int, parallel stream ##############################~~# 3.596 +- 0.9 % + +fast parallel summation +10000 elements +collection.mutable.WrappedArray + Int, par collection ###############################= 1.272 +- 4.8 % + Int, parallel stream ##############################~~# 2.706 +- 0.6 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow parallel trigonometry +10 elements +collection.mutable.ArrayBuffer + Int, par collection ######################### 0.999 +- 1.6 % + Int, parallel stream ##############################~~# 3.251 +- 0.9 % + +slow parallel trigonometry +10 elements +Array + Int, par collection ######################### 1.000 +- 1.0 % + Int, parallel stream ##############################~~# 3.257 +- 0.6 % + +slow parallel trigonometry +10 elements +collection.mutable.ArraySeq + Int, par collection #########################- 1.009 +- 1.4 % + Int, parallel stream ##############################~~# 3.252 +- 0.9 % + +slow parallel trigonometry +10 elements +collection.immutable.HashSet + Int, par collection ###############- 0.610 +- 1.2 % + Int, parallel stream ##############################~~# 2.825 +- 0.9 % + +slow parallel trigonometry +10 elements +java.util.ArrayList + Int, parallel stream ##############################~~# 3.210 +- 1.3 % + +slow parallel trigonometry +10 elements +java.util.LinkedList + Int, parallel stream ##############################~~# 3.196 +- 0.4 % + +slow parallel trigonometry +10 elements +collection.mutable.HashSet + Int, par collection ########################- 0.969 +- 2.4 % + Int, parallel stream ##############################~~# 3.086 +- 0.4 % + +slow parallel trigonometry +10 elements +collection.immutable.Vector + Int, par collection ################## 0.721 +- 2.2 % + Int, parallel stream ##############################~~# 3.180 +- 0.9 % + +slow parallel trigonometry +10 elements 
+collection.mutable.WrappedArray + Int, par collection ######################### 1.000 +- 1.1 % + Int, parallel stream ##############################~~# 3.259 +- 1.1 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow parallel trigonometry +10000 elements +collection.mutable.ArrayBuffer + Int, par collection ###########################- 1.096 +- 0.7 % + Int, parallel stream ##############################~~# 1.369 +- 0.5 % + +slow parallel trigonometry +10000 elements +Array + Int, par collection ######################### 1.000 +- 2.1 % + Int, parallel stream ##############################~~# 1.385 +- 0.7 % + +slow parallel trigonometry +10000 elements +collection.mutable.ArraySeq + Int, par collection ########################### 1.078 +- 2.1 % + Int, parallel stream ##############################~~# 1.347 +- 2.6 % + +slow parallel trigonometry +10000 elements +collection.immutable.HashSet + Int, par collection #####################- 0.854 +- 2.2 % + Int, parallel stream ################################- 1.290 +- 0.7 % + +slow parallel trigonometry +10000 elements +java.util.ArrayList + Int, parallel stream ##############################~~# 1.381 +- 0.9 % + +slow parallel trigonometry +10000 elements +java.util.LinkedList + Int, parallel stream ###############################= 1.273 +- 0.6 % + +slow parallel trigonometry +10000 elements +collection.mutable.HashSet + Int, par collection ##################- 0.731 +- 3.7 % + Int, parallel stream ##############################~~# 1.338 +- 0.5 % + +slow parallel trigonometry +10000 elements +collection.immutable.Vector + Int, par collection #################- 0.699 +- 1.7 % + Int, parallel stream ##############################~~# 1.358 +- 1.2 % + +slow parallel trigonometry +10000 elements +collection.mutable.WrappedArray + Int, par collection #########################- 1.018 +- 2.1 % + Int, parallel stream ##############################~~# 1.356 +- 0.7 % 
+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast summation +10 elements +collection.mutable.ArrayBuffer + Int, base collection #### 0.162 +- 4.7 % + Int, iterator on coll ### 0.121 +- 1.4 % + Int, serial stream ## 0.086 +- 2.3 % + Int Stepper (can par) ###### 0.236 +- 1.3 % + +fast summation +10 elements +Array + Int, base collection ######################### 1.000 +- 0.2 % + Int, iterator on coll ##- 0.097 +- 2.3 % + Int, serial stream #####- 0.217 +- 1.0 % + Int Stepper (can par) #####################- 0.855 +- 0.3 % + +fast summation +10 elements +collection.mutable.ArraySeq + Int, base collection ####- 0.167 +- 0.3 % + Int, iterator on coll ###- 0.140 +- 2.3 % + Int, serial stream ## 0.084 +- 4.9 % + Int Stepper (can par) #############= 0.552 +- 0.4 % + +fast summation +10 elements +collection.mutable.ArrayStack + Int, base collection ####- 0.169 +- 4.1 % + Int, iterator on coll #### 0.157 +- 2.3 % + Int, serial stream ## 0.074 +- 4.7 % + Int Stepper (seq only) ######- 0.258 +- 1.3 % + +fast summation +10 elements +collection.immutable.ListSet + Int, base collection ##= 0.107 +- 0.9 % + Int, iterator on coll #### 0.163 +- 1.3 % + Int, serial stream ###= 0.140 +- 2.2 % + Int Stepper (seq only) ##= 0.111 +- 18.1 % + +fast summation +10 elements +collection.immutable.HashSet + Int, base collection ###= 0.148 +- 3.7 % + Int, iterator on coll ## 0.086 +- 0.8 % + Int, serial stream ### 0.120 +- 1.1 % + Int Stepper (can par) ### 0.119 +- 4.0 % + +fast summation +10 elements +java.util.ArrayList + Int, serial stream #####= 0.232 +- 0.6 % + +fast summation +10 elements +java.util.LinkedList + Int, serial stream #####- 0.214 +- 0.5 % + +fast summation +10 elements +collection.mutable.LinkedHashSet + Int, base collection #### 0.159 +- 5.0 % + Int, iterator on coll #### 0.157 +- 2.3 % + Int, serial stream ## 0.080 +- 2.9 % + Int Stepper (seq only) ####- 0.168 +- 0.5 % + +fast summation +10 elements +collection.immutable.List + Int, 
base collection ######= 0.263 +- 0.2 % + Int, iterator on coll #### 0.155 +- 1.4 % + Int, serial stream ### 0.121 +- 2.4 % + Int Stepper (seq only) ### 0.117 +- 2.1 % + +fast summation +10 elements +collection.mutable.HashSet + Int, base collection ##= 0.109 +- 2.1 % + Int, iterator on coll #= 0.070 +- 0.3 % + Int, serial stream ##- 0.093 +- 0.8 % + Int Stepper (can par) #######= 0.308 +- 0.5 % + +fast summation +10 elements +collection.mutable.Queue + Int, base collection # 0.041 +- 0.8 % + Int, iterator on coll ##- 0.088 +- 0.3 % + Int, serial stream # 0.034 +- 7.5 % + Int Stepper (seq only) #- 0.055 +- 0.8 % + +fast summation +10 elements +collection.mutable.PriorityQueue + Int, base collection ##= 0.104 +- 2.7 % + Int, iterator on coll ###- 0.133 +- 2.1 % + Int, serial stream #= 0.073 +- 2.1 % + Int Stepper (seq only) ###- 0.136 +- 0.4 % + +fast summation +10 elements +collection.immutable.Queue + Int, base collection ##= 0.108 +- 0.7 % + Int, iterator on coll ###= 0.146 +- 0.8 % + Int, serial stream ## 0.081 +- 18.1 % + Int Stepper (seq only) #- 0.058 +- 1.4 % + +fast summation +10 elements +collection.immutable.Stream + Int, base collection ######= 0.261 +- 0.2 % + Int, iterator on coll # 0.035 +- 5.4 % + Int, serial stream ### 0.116 +- 1.7 % + Int Stepper (seq only) ##= 0.113 +- 0.3 % + +fast summation +10 elements +collection.immutable.TreeSet + Int, base collection #### 0.166 +- 0.6 % + Int, iterator on coll ## 0.081 +- 0.6 % + Int, serial stream ##= 0.102 +- 1.5 % + Int Stepper (seq only) ##- 0.097 +- 0.3 % + +fast summation +10 elements +collection.immutable.Vector + Int, base collection ##= 0.104 +- 0.5 % + Int, iterator on coll ###- 0.130 +- 2.2 % + Int, serial stream ## 0.078 +- 2.1 % + Int Stepper (can par) ######### 0.359 +- 0.8 % + +fast summation +10 elements +collection.mutable.WrappedArray + Int, base collection ###= 0.141 +- 0.3 % + Int, iterator on coll ##- 0.096 +- 5.1 % + Int, serial stream ## 0.079 +- 0.4 % + Int Stepper (can par) ####= 
0.186 +- 1.5 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +fast summation +10000 elements +collection.mutable.ArrayBuffer + Int, base collection # 0.043 +- 2.8 % + Int, iterator on coll # 0.038 +- 8.6 % + Int, serial stream = 0.027 +- 14.9 % + Int Stepper (can par) ## 0.083 +- 0.5 % + +fast summation +10000 elements +Array + Int, base collection ######################### 1.000 +- 0.1 % + Int, iterator on coll = 0.027 +- 5.5 % + Int, serial stream ###- 0.128 +- 0.3 % + Int Stepper (can par) ######################### 1.000 +- 0.1 % + +fast summation +10000 elements +collection.mutable.ArraySeq + Int, base collection # 0.044 +- 4.0 % + Int, iterator on coll # 0.042 +- 2.9 % + Int, serial stream = 0.026 +- 15.1 % + Int Stepper (can par) #######= 0.309 +- 1.5 % + +fast summation +10000 elements +collection.mutable.ArrayStack + Int, base collection # 0.043 +- 15.3 % + Int, iterator on coll # 0.044 +- 1.8 % + Int, serial stream = 0.026 +- 11.4 % + Int Stepper (seq only) ##- 0.089 +- 0.6 % + +fast summation +10000 elements +collection.immutable.ListSet + Int, base collection #- 0.058 +- 1.8 % + Int, iterator on coll #- 0.059 +- 2.7 % + Int, serial stream = 0.022 +- 2.4 % + Int Stepper (seq only) #- 0.048 +- 35.9 % + +fast summation +10000 elements +collection.immutable.HashSet + Int, base collection = 0.028 +- 1.6 % + Int, iterator on coll = 0.023 +- 1.2 % + Int, serial stream - 0.014 +- 0.9 % + Int Stepper (can par) - 0.016 +- 1.2 % + +fast summation +10000 elements +java.util.ArrayList + Int, serial stream ######- 0.252 +- 11.9 % + +fast summation +10000 elements +java.util.LinkedList + Int, serial stream ##= 0.103 +- 7.4 % + +fast summation +10000 elements +collection.mutable.LinkedHashSet + Int, base collection # 0.044 +- 12.3 % + Int, iterator on coll #- 0.047 +- 2.8 % + Int, serial stream = 0.026 +- 14.3 % + Int Stepper (seq only) #- 0.049 +- 9.0 % + +fast summation +10000 elements +collection.immutable.List + Int, base 
collection #- 0.053 +- 9.6 % + Int, iterator on coll #- 0.055 +- 3.4 % + Int, serial stream # 0.040 +- 1.0 % + Int Stepper (seq only) #- 0.048 +- 34.9 % + +fast summation +10000 elements +collection.mutable.HashSet + Int, base collection = 0.021 +- 8.1 % + Int, iterator on coll = 0.023 +- 5.8 % + Int, serial stream - 0.011 +- 0.8 % + Int Stepper (can par) #= 0.067 +- 14.8 % + +fast summation +10000 elements +collection.mutable.Queue + Int, base collection - 0.009 +- 1.0 % + Int, iterator on coll = 0.028 +- 4.4 % + Int, serial stream - 0.012 +- 1.3 % + Int Stepper (seq only) - 0.016 +- 1.0 % + +fast summation +10000 elements +collection.mutable.PriorityQueue + Int, base collection # 0.034 +- 29.3 % + Int, iterator on coll # 0.043 +- 1.3 % + Int, serial stream = 0.024 +- 8.8 % + Int Stepper (seq only) # 0.036 +- 0.3 % + +fast summation +10000 elements +collection.immutable.Queue + Int, base collection #- 0.056 +- 3.3 % + Int, iterator on coll #- 0.055 +- 2.0 % + Int, serial stream = 0.026 +- 1.2 % + Int Stepper (seq only) - 0.017 +- 1.1 % + +fast summation +10000 elements +collection.immutable.Stream + Int, base collection # 0.042 +- 11.4 % + Int, iterator on coll - 0.010 +- 8.4 % + Int, serial stream = 0.022 +- 2.1 % + Int Stepper (seq only) = 0.031 +- 3.1 % + +fast summation +10000 elements +collection.immutable.TreeSet + Int, base collection #- 0.048 +- 1.4 % + Int, iterator on coll = 0.032 +- 3.8 % + Int, serial stream - 0.019 +- 2.7 % + Int Stepper (seq only) = 0.030 +- 2.5 % + +fast summation +10000 elements +collection.immutable.Vector + Int, base collection # 0.039 +- 12.1 % + Int, iterator on coll # 0.041 +- 3.3 % + Int, serial stream = 0.027 +- 14.5 % + Int Stepper (can par) ###- 0.131 +- 0.8 % + +fast summation +10000 elements +collection.mutable.WrappedArray + Int, base collection # 0.035 +- 2.0 % + Int, iterator on coll = 0.028 +- 2.5 % + Int, serial stream = 0.025 +- 17.6 % + Int Stepper (can par) #- 0.050 +- 7.6 % 
+>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow trigonometry +10 elements +collection.mutable.ArrayBuffer + Int, base collection ########################- 0.980 +- 0.2 % + Int, iterator on coll ########################= 0.980 +- 0.2 % + Int, serial stream ########################= 0.982 +- 0.2 % + Int Stepper (can par) ######################### 0.997 +- 0.1 % + +slow trigonometry +10 elements +Array + Int, base collection ######################### 1.000 +- 0.1 % + Int, iterator on coll ########################= 0.981 +- 0.2 % + Int, serial stream ########################= 0.989 +- 0.1 % + Int Stepper (can par) ######################### 1.000 +- 0.1 % + +slow trigonometry +10 elements +collection.mutable.ArraySeq + Int, base collection ################ 0.645 +- 0.1 % + Int, iterator on coll ########################= 0.980 +- 0.2 % + Int, serial stream ########################= 0.984 +- 0.1 % + Int Stepper (can par) ######################### 0.998 +- 0.2 % + +slow trigonometry +10 elements +collection.mutable.ArrayStack + Int, base collection ################ 0.642 +- 0.2 % + Int, iterator on coll ########################= 0.981 +- 0.5 % + Int, serial stream ########################= 0.983 +- 0.2 % + Int Stepper (seq only) ######################### 0.994 +- 0.2 % + +slow trigonometry +10 elements +collection.immutable.ListSet + Int, base collection ######################= 0.910 +- 0.3 % + Int, iterator on coll ########################- 0.978 +- 0.3 % + Int, serial stream ########################= 0.980 +- 0.2 % + Int Stepper (seq only) ########################= 0.991 +- 0.2 % + +slow trigonometry +10 elements +collection.immutable.HashSet + Int, base collection ###############= 0.628 +- 0.2 % + Int, iterator on coll ########################- 0.978 +- 0.4 % + Int, serial stream ########################- 0.977 +- 0.2 % + Int Stepper (can par) ########################= 0.989 +- 0.2 % + +slow trigonometry +10 elements 
+java.util.ArrayList + Int, serial stream ########################= 0.985 +- 0.2 % + +slow trigonometry +10 elements +java.util.LinkedList + Int, serial stream ########################= 0.988 +- 0.1 % + +slow trigonometry +10 elements +collection.mutable.LinkedHashSet + Int, base collection #######################= 0.950 +- 0.3 % + Int, iterator on coll ########################= 0.981 +- 0.2 % + Int, serial stream ########################= 0.984 +- 0.2 % + Int Stepper (seq only) ######################### 0.995 +- 0.3 % + +slow trigonometry +10 elements +collection.immutable.List + Int, base collection ########################- 0.974 +- 0.3 % + Int, iterator on coll ########################- 0.977 +- 0.2 % + Int, serial stream ########################- 0.978 +- 0.3 % + Int Stepper (seq only) ########################= 0.990 +- 0.1 % + +slow trigonometry +10 elements +collection.mutable.HashSet + Int, base collection #######################= 0.944 +- 0.2 % + Int, iterator on coll ########################= 0.981 +- 0.2 % + Int, serial stream ########################- 0.979 +- 0.6 % + Int Stepper (can par) ########################= 0.993 +- 0.1 % + +slow trigonometry +10 elements +collection.mutable.Queue + Int, base collection ######################= 0.909 +- 0.3 % + Int, iterator on coll ########################- 0.977 +- 0.1 % + Int, serial stream ######################## 0.962 +- 0.2 % + Int Stepper (seq only) ########################- 0.976 +- 0.2 % + +slow trigonometry +10 elements +collection.mutable.PriorityQueue + Int, base collection ########################- 0.970 +- 0.3 % + Int, iterator on coll ########################- 0.976 +- 0.2 % + Int, serial stream ########################- 0.978 +- 0.3 % + Int Stepper (seq only) ########################= 0.990 +- 0.1 % + +slow trigonometry +10 elements +collection.immutable.Queue + Int, base collection ######################## 0.956 +- 0.2 % + Int, iterator on coll ########################- 0.976 +- 0.3 % + Int, 
serial stream ########################- 0.978 +- 0.6 % + Int Stepper (seq only) ########################- 0.977 +- 0.2 % + +slow trigonometry +10 elements +collection.immutable.Stream + Int, base collection #######################= 0.941 +- 0.3 % + Int, iterator on coll ######################## 0.960 +- 0.3 % + Int, serial stream ########################- 0.978 +- 0.2 % + Int Stepper (seq only) ########################= 0.991 +- 0.1 % + +slow trigonometry +10 elements +collection.immutable.TreeSet + Int, base collection ###############= 0.624 +- 0.2 % + Int, iterator on coll ########################- 0.972 +- 0.2 % + Int, serial stream ########################- 0.978 +- 0.2 % + Int Stepper (seq only) ########################= 0.987 +- 0.1 % + +slow trigonometry +10 elements +collection.immutable.Vector + Int, base collection ########################- 0.975 +- 0.2 % + Int, iterator on coll ########################= 0.981 +- 0.1 % + Int, serial stream ########################= 0.984 +- 0.1 % + Int Stepper (can par) ######################### 0.997 +- 0.2 % + +slow trigonometry +10 elements +collection.mutable.WrappedArray + Int, base collection ########################- 0.974 +- 0.2 % + Int, iterator on coll ########################- 0.980 +- 0.2 % + Int, serial stream ########################= 0.985 +- 0.2 % + Int Stepper (can par) ######################### 0.996 +- 0.1 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> +slow trigonometry +10000 elements +collection.mutable.ArrayBuffer + Int, base collection ########################= 0.985 +- 0.2 % + Int, iterator on coll ########################= 0.986 +- 0.8 % + Int, serial stream ########################= 0.988 +- 0.2 % + Int Stepper (can par) ######################### 0.996 +- 0.1 % + +slow trigonometry +10000 elements +Array + Int, base collection ######################### 1.000 +- 0.2 % + Int, iterator on coll ########################= 0.987 +- 0.2 % + Int, serial stream 
########################= 0.990 +- 0.1 % + Int Stepper (can par) ######################### 1.000 +- 0.1 % + +slow trigonometry +10000 elements +collection.mutable.ArraySeq + Int, base collection ################## 0.725 +- 13.6 % + Int, iterator on coll ########################= 0.988 +- 0.2 % + Int, serial stream ########################= 0.987 +- 0.3 % + Int Stepper (can par) ######################### 0.995 +- 0.2 % + +slow trigonometry +10000 elements +collection.mutable.ArrayStack + Int, base collection ################- 0.651 +- 0.2 % + Int, iterator on coll ########################= 0.986 +- 0.5 % + Int, serial stream ########################= 0.988 +- 0.2 % + Int Stepper (seq only) ######################### 0.995 +- 0.1 % + +slow trigonometry +10000 elements +collection.immutable.ListSet + Int, base collection ######################- 0.888 +- 0.4 % + Int, iterator on coll ########################= 0.988 +- 0.2 % + Int, serial stream ########################= 0.986 +- 0.2 % + Int Stepper (seq only) ########################= 0.992 +- 0.2 % + +slow trigonometry +10000 elements +collection.immutable.HashSet + Int, base collection ##############- 0.569 +- 0.4 % + Int, iterator on coll ########################- 0.977 +- 0.4 % + Int, serial stream ########################- 0.969 +- 0.7 % + Int Stepper (can par) ########################- 0.969 +- 0.1 % + +slow trigonometry +10000 elements +java.util.ArrayList + Int, serial stream ########################= 0.989 +- 0.1 % + +slow trigonometry +10000 elements +java.util.LinkedList + Int, serial stream ########################= 0.989 +- 0.2 % + +slow trigonometry +10000 elements +collection.mutable.LinkedHashSet + Int, base collection ####################### 0.922 +- 0.3 % + Int, iterator on coll ########################= 0.991 +- 0.2 % + Int, serial stream ########################= 0.986 +- 0.2 % + Int Stepper (seq only) ######################### 0.996 +- 0.1 % + +slow trigonometry +10000 elements 
+collection.immutable.List + Int, base collection ########################= 0.982 +- 0.2 % + Int, iterator on coll ########################= 0.990 +- 0.2 % + Int, serial stream ########################= 0.987 +- 0.2 % + Int Stepper (seq only) ######################### 0.995 +- 0.1 % + +slow trigonometry +10000 elements +collection.mutable.HashSet + Int, base collection ######################= 0.904 +- 0.3 % + Int, iterator on coll ########################- 0.979 +- 0.3 % + Int, serial stream ########################- 0.977 +- 0.3 % + Int Stepper (can par) ########################= 0.982 +- 0.2 % + +slow trigonometry +10000 elements +collection.mutable.Queue + Int, base collection #######################= 0.945 +- 0.2 % + Int, iterator on coll ########################= 0.990 +- 0.2 % + Int, serial stream ######################## 0.960 +- 0.7 % + Int Stepper (seq only) ######################## 0.959 +- 0.2 % + +slow trigonometry +10000 elements +collection.mutable.PriorityQueue + Int, base collection ################- 0.650 +- 0.2 % + Int, iterator on coll ########################= 0.984 +- 0.4 % + Int, serial stream ########################= 0.982 +- 0.3 % + Int Stepper (seq only) ########################= 0.983 +- 0.2 % + +slow trigonometry +10000 elements +collection.immutable.Queue + Int, base collection ######################## 0.966 +- 0.2 % + Int, iterator on coll ########################= 0.990 +- 0.3 % + Int, serial stream ########################- 0.975 +- 1.1 % + Int Stepper (seq only) ########################= 0.982 +- 0.3 % + +slow trigonometry +10000 elements +collection.immutable.Stream + Int, base collection #######################= 0.941 +- 0.3 % + Int, iterator on coll ######################## 0.962 +- 0.4 % + Int, serial stream ########################= 0.982 +- 0.3 % + Int Stepper (seq only) ########################= 0.988 +- 0.3 % + +slow trigonometry +10000 elements +collection.immutable.TreeSet + Int, base collection ##############- 0.571 +- 
0.4 % + Int, iterator on coll ########################- 0.978 +- 0.3 % + Int, serial stream ########################- 0.973 +- 0.3 % + Int Stepper (seq only) ########################- 0.972 +- 0.6 % + +slow trigonometry +10000 elements +collection.immutable.Vector + Int, base collection ########################= 0.982 +- 0.2 % + Int, iterator on coll ########################= 0.990 +- 0.2 % + Int, serial stream ########################= 0.988 +- 0.1 % + Int Stepper (can par) ######################### 0.996 +- 0.1 % + +slow trigonometry +10000 elements +collection.mutable.WrappedArray + Int, base collection ########################= 0.982 +- 0.2 % + Int, iterator on coll ########################= 0.989 +- 0.1 % + Int, serial stream ########################= 0.988 +- 0.1 % + Int Stepper (can par) ########################= 0.992 +- 0.2 % +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> diff --git a/benchmark/results/jmhbench.log b/benchmark/results/jmhbench.log new file mode 100644 index 0000000..63568d1 --- /dev/null +++ b/benchmark/results/jmhbench.log @@ -0,0 +1,1054 @@ +[info] +[info] # Run complete. 
Total time: 11:52:29 +[info] +[info] Benchmark Mode Cnt Score Error Units +[info] JmhBench.bench_cI_fmc_arb_0 thrpt 25 8035423.336 ± 755409.322 ops/s +[info] JmhBench.bench_cI_fmc_arb_1 thrpt 25 20747.799 ± 136.417 ops/s +[info] JmhBench.bench_cI_fmc_arr_0 thrpt 25 71925991.876 ± 147120.839 ops/s +[info] JmhBench.bench_cI_fmc_arr_1 thrpt 25 92378.767 ± 100.507 ops/s +[info] JmhBench.bench_cI_fmc_ars_0 thrpt 25 4887009.610 ± 607583.010 ops/s +[info] JmhBench.bench_cI_fmc_ars_1 thrpt 25 16764.023 ± 542.731 ops/s +[info] JmhBench.bench_cI_fmc_ast_0 thrpt 25 5855354.579 ± 297609.045 ops/s +[info] JmhBench.bench_cI_fmc_ast_1 thrpt 25 17296.740 ± 230.641 ops/s +[info] JmhBench.bench_cI_fmc_ils_0 thrpt 25 2652720.262 ± 414787.126 ops/s +[info] JmhBench.bench_cI_fmc_ils_1 thrpt 25 487.348 ± 25.292 ops/s +[info] JmhBench.bench_cI_fmc_ish_0 thrpt 25 4987836.424 ± 646222.972 ops/s +[info] JmhBench.bench_cI_fmc_ish_1 thrpt 25 3382.659 ± 305.098 ops/s +[info] JmhBench.bench_cI_fmc_lhs_0 thrpt 25 5835320.671 ± 1009998.646 ops/s +[info] JmhBench.bench_cI_fmc_lhs_1 thrpt 25 7535.361 ± 327.683 ops/s +[info] JmhBench.bench_cI_fmc_lst_0 thrpt 25 7099167.181 ± 429761.270 ops/s +[info] JmhBench.bench_cI_fmc_lst_1 thrpt 25 12440.006 ± 224.875 ops/s +[info] JmhBench.bench_cI_fmc_mhs_0 thrpt 25 4291393.195 ± 47473.930 ops/s +[info] JmhBench.bench_cI_fmc_mhs_1 thrpt 25 4203.392 ± 112.937 ops/s +[info] JmhBench.bench_cI_fmc_muq_0 thrpt 25 2496111.379 ± 333932.718 ops/s +[info] JmhBench.bench_cI_fmc_muq_1 thrpt 25 5458.980 ± 58.848 ops/s +[info] JmhBench.bench_cI_fmc_prq_0 thrpt 25 6648768.148 ± 240161.420 ops/s +[info] JmhBench.bench_cI_fmc_prq_1 thrpt 25 9518.984 ± 882.821 ops/s +[info] JmhBench.bench_cI_fmc_que_0 thrpt 25 4993970.353 ± 84787.715 ops/s +[info] JmhBench.bench_cI_fmc_que_1 thrpt 25 7301.230 ± 48.307 ops/s +[info] JmhBench.bench_cI_fmc_stm_0 thrpt 25 5372437.328 ± 295448.912 ops/s +[info] JmhBench.bench_cI_fmc_stm_1 thrpt 25 8047.781 ± 190.906 ops/s +[info] 
JmhBench.bench_cI_fmc_trs_0 thrpt 25 4988067.719 ± 194129.387 ops/s +[info] JmhBench.bench_cI_fmc_trs_1 thrpt 25 2085.913 ± 52.524 ops/s +[info] JmhBench.bench_cI_fmc_vec_0 thrpt 25 6669754.581 ± 192303.049 ops/s +[info] JmhBench.bench_cI_fmc_vec_1 thrpt 25 14678.526 ± 774.640 ops/s +[info] JmhBench.bench_cI_fmc_wra_0 thrpt 25 6826577.628 ± 145826.670 ops/s +[info] JmhBench.bench_cI_fmc_wra_1 thrpt 25 11525.280 ± 217.619 ops/s +[info] JmhBench.bench_cI_mdtc_arb_0 thrpt 25 4443593.338 ± 31059.496 ops/s +[info] JmhBench.bench_cI_mdtc_arb_1 thrpt 25 11797.585 ± 410.228 ops/s +[info] JmhBench.bench_cI_mdtc_arr_0 thrpt 25 103444873.867 ± 944076.540 ops/s +[info] JmhBench.bench_cI_mdtc_arr_1 thrpt 25 75842976.523 ± 549782.886 ops/s +[info] JmhBench.bench_cI_mdtc_ars_0 thrpt 25 3207470.098 ± 38695.027 ops/s +[info] JmhBench.bench_cI_mdtc_ars_1 thrpt 25 8556.621 ± 195.807 ops/s +[info] JmhBench.bench_cI_mdtc_ast_0 thrpt 25 2355394.121 ± 38473.884 ops/s +[info] JmhBench.bench_cI_mdtc_ast_1 thrpt 25 8607.394 ± 728.653 ops/s +[info] JmhBench.bench_cI_mdtc_lst_0 thrpt 25 6468557.411 ± 137487.766 ops/s +[info] JmhBench.bench_cI_mdtc_lst_1 thrpt 25 13991.072 ± 136.574 ops/s +[info] JmhBench.bench_cI_mdtc_muq_0 thrpt 25 1345227.001 ± 58882.319 ops/s +[info] JmhBench.bench_cI_mdtc_muq_1 thrpt 25 2502.709 ± 79.914 ops/s +[info] JmhBench.bench_cI_mdtc_que_0 thrpt 25 2301715.037 ± 18642.229 ops/s +[info] JmhBench.bench_cI_mdtc_que_1 thrpt 25 4201.094 ± 57.552 ops/s +[info] JmhBench.bench_cI_mdtc_stm_0 thrpt 25 1351471.957 ± 46355.839 ops/s +[info] JmhBench.bench_cI_mdtc_stm_1 thrpt 25 664904.819 ± 42603.168 ops/s +[info] JmhBench.bench_cI_mdtc_vec_0 thrpt 25 5978680.943 ± 65546.254 ops/s +[info] JmhBench.bench_cI_mdtc_vec_1 thrpt 25 15557.817 ± 309.459 ops/s +[info] JmhBench.bench_cI_mdtc_wra_0 thrpt 25 4673494.151 ± 27640.256 ops/s +[info] JmhBench.bench_cI_mdtc_wra_1 thrpt 25 9370.962 ± 826.045 ops/s +[info] JmhBench.bench_cI_sum_arb_0 thrpt 25 18335880.324 ± 860611.155 ops/s 
+[info] JmhBench.bench_cI_sum_arb_1 thrpt 25 18799.347 ± 521.962 ops/s +[info] JmhBench.bench_cI_sum_arr_0 thrpt 25 113429397.689 ± 246632.083 ops/s +[info] JmhBench.bench_cI_sum_arr_1 thrpt 25 433573.009 ± 405.219 ops/s +[info] JmhBench.bench_cI_sum_ars_0 thrpt 25 18914632.257 ± 65515.415 ops/s +[info] JmhBench.bench_cI_sum_ars_1 thrpt 25 18879.126 ± 757.161 ops/s +[info] JmhBench.bench_cI_sum_ast_0 thrpt 25 19140457.190 ± 777303.561 ops/s +[info] JmhBench.bench_cI_sum_ast_1 thrpt 25 18800.310 ± 2869.443 ops/s +[info] JmhBench.bench_cI_sum_ils_0 thrpt 25 12156588.727 ± 114845.877 ops/s +[info] JmhBench.bench_cI_sum_ils_1 thrpt 25 25213.376 ± 444.053 ops/s +[info] JmhBench.bench_cI_sum_ish_0 thrpt 25 16761123.678 ± 614314.543 ops/s +[info] JmhBench.bench_cI_sum_ish_1 thrpt 25 12198.185 ± 194.961 ops/s +[info] JmhBench.bench_cI_sum_lhs_0 thrpt 25 18039130.300 ± 906017.637 ops/s +[info] JmhBench.bench_cI_sum_lhs_1 thrpt 25 18922.333 ± 2320.040 ops/s +[info] JmhBench.bench_cI_sum_lst_0 thrpt 25 29817559.612 ± 58412.021 ops/s +[info] JmhBench.bench_cI_sum_lst_1 thrpt 25 22834.330 ± 2183.263 ops/s +[info] JmhBench.bench_cI_sum_mhs_0 thrpt 25 12329436.432 ± 253623.619 ops/s +[info] JmhBench.bench_cI_sum_mhs_1 thrpt 25 8952.772 ± 723.365 ops/s +[info] JmhBench.bench_cI_sum_muq_0 thrpt 25 4613996.286 ± 35553.633 ops/s +[info] JmhBench.bench_cI_sum_muq_1 thrpt 25 3965.004 ± 40.291 ops/s +[info] JmhBench.bench_cI_sum_prq_0 thrpt 25 11768606.036 ± 322246.512 ops/s +[info] JmhBench.bench_cI_sum_prq_1 thrpt 25 14595.293 ± 4283.029 ops/s +[info] JmhBench.bench_cI_sum_que_0 thrpt 25 12216359.219 ± 84893.062 ops/s +[info] JmhBench.bench_cI_sum_que_1 thrpt 25 24107.095 ± 790.558 ops/s +[info] JmhBench.bench_cI_sum_stm_0 thrpt 25 29645049.582 ± 59278.612 ops/s +[info] JmhBench.bench_cI_sum_stm_1 thrpt 25 18080.340 ± 2059.034 ops/s +[info] JmhBench.bench_cI_sum_trs_0 thrpt 25 18874037.032 ± 120977.755 ops/s +[info] JmhBench.bench_cI_sum_trs_1 thrpt 25 20810.794 ± 288.713 ops/s 
+[info] JmhBench.bench_cI_sum_vec_0 thrpt 25 11742774.808 ± 63268.375 ops/s +[info] JmhBench.bench_cI_sum_vec_1 thrpt 25 16853.876 ± 2040.181 ops/s +[info] JmhBench.bench_cI_sum_wra_0 thrpt 25 16037638.914 ± 48094.558 ops/s +[info] JmhBench.bench_cI_sum_wra_1 thrpt 25 15035.828 ± 298.395 ops/s +[info] JmhBench.bench_cI_trig_arb_0 thrpt 25 127384.023 ± 230.345 ops/s +[info] JmhBench.bench_cI_trig_arb_1 thrpt 25 127.523 ± 0.207 ops/s +[info] JmhBench.bench_cI_trig_arr_0 thrpt 25 129984.043 ± 152.548 ops/s +[info] JmhBench.bench_cI_trig_arr_1 thrpt 25 129.418 ± 0.233 ops/s +[info] JmhBench.bench_cI_trig_ars_0 thrpt 25 83843.573 ± 112.771 ops/s +[info] JmhBench.bench_cI_trig_ars_1 thrpt 25 93.856 ± 12.751 ops/s +[info] JmhBench.bench_cI_trig_ast_0 thrpt 25 83514.043 ± 144.645 ops/s +[info] JmhBench.bench_cI_trig_ast_1 thrpt 25 84.199 ± 0.141 ops/s +[info] JmhBench.bench_cI_trig_ils_0 thrpt 25 118241.682 ± 333.038 ops/s +[info] JmhBench.bench_cI_trig_ils_1 thrpt 25 114.961 ± 0.485 ops/s +[info] JmhBench.bench_cI_trig_ish_0 thrpt 25 81627.106 ± 148.992 ops/s +[info] JmhBench.bench_cI_trig_ish_1 thrpt 25 73.677 ± 0.328 ops/s +[info] JmhBench.bench_cI_trig_lhs_0 thrpt 25 123484.374 ± 418.215 ops/s +[info] JmhBench.bench_cI_trig_lhs_1 thrpt 25 119.347 ± 0.338 ops/s +[info] JmhBench.bench_cI_trig_lst_0 thrpt 25 126621.643 ± 359.486 ops/s +[info] JmhBench.bench_cI_trig_lst_1 thrpt 25 127.104 ± 0.287 ops/s +[info] JmhBench.bench_cI_trig_mhs_0 thrpt 25 122733.756 ± 238.867 ops/s +[info] JmhBench.bench_cI_trig_mhs_1 thrpt 25 117.038 ± 0.317 ops/s +[info] JmhBench.bench_cI_trig_muq_0 thrpt 25 118220.195 ± 404.616 ops/s +[info] JmhBench.bench_cI_trig_muq_1 thrpt 25 122.307 ± 0.246 ops/s +[info] JmhBench.bench_cI_trig_prq_0 thrpt 25 126111.667 ± 361.165 ops/s +[info] JmhBench.bench_cI_trig_prq_1 thrpt 25 84.151 ± 0.161 ops/s +[info] JmhBench.bench_cI_trig_que_0 thrpt 25 124229.823 ± 295.406 ops/s +[info] JmhBench.bench_cI_trig_que_1 thrpt 25 124.972 ± 0.303 ops/s +[info] 
JmhBench.bench_cI_trig_stm_0 thrpt 25 122279.057 ± 426.484 ops/s +[info] JmhBench.bench_cI_trig_stm_1 thrpt 25 121.842 ± 0.404 ops/s +[info] JmhBench.bench_cI_trig_trs_0 thrpt 25 81105.244 ± 128.368 ops/s +[info] JmhBench.bench_cI_trig_trs_1 thrpt 25 73.845 ± 0.314 ops/s +[info] JmhBench.bench_cI_trig_vec_0 thrpt 25 126718.115 ± 223.274 ops/s +[info] JmhBench.bench_cI_trig_vec_1 thrpt 25 127.101 ± 0.219 ops/s +[info] JmhBench.bench_cI_trig_wra_0 thrpt 25 126664.936 ± 216.923 ops/s +[info] JmhBench.bench_cI_trig_wra_1 thrpt 25 127.123 ± 0.273 ops/s +[info] JmhBench.bench_cS_fmc_arb_0 thrpt 25 8309821.046 ± 49072.647 ops/s +[info] JmhBench.bench_cS_fmc_arb_1 thrpt 25 17422.556 ± 212.521 ops/s +[info] JmhBench.bench_cS_fmc_arr_0 thrpt 25 54179418.146 ± 74335.302 ops/s +[info] JmhBench.bench_cS_fmc_arr_1 thrpt 25 53181.927 ± 446.294 ops/s +[info] JmhBench.bench_cS_fmc_ars_0 thrpt 25 5103543.646 ± 215901.936 ops/s +[info] JmhBench.bench_cS_fmc_ars_1 thrpt 25 15554.519 ± 823.745 ops/s +[info] JmhBench.bench_cS_fmc_ast_0 thrpt 25 6700667.336 ± 85523.401 ops/s +[info] JmhBench.bench_cS_fmc_ast_1 thrpt 25 17258.969 ± 314.253 ops/s +[info] JmhBench.bench_cS_fmc_ils_0 thrpt 25 2941128.962 ± 142969.982 ops/s +[info] JmhBench.bench_cS_fmc_ils_1 thrpt 25 135.655 ± 11.876 ops/s +[info] JmhBench.bench_cS_fmc_ish_0 thrpt 25 5044613.272 ± 157906.830 ops/s +[info] JmhBench.bench_cS_fmc_ish_1 thrpt 25 2872.376 ± 36.541 ops/s +[info] JmhBench.bench_cS_fmc_lhs_0 thrpt 25 7265749.135 ± 101616.705 ops/s +[info] JmhBench.bench_cS_fmc_lhs_1 thrpt 25 4845.430 ± 26.025 ops/s +[info] JmhBench.bench_cS_fmc_lst_0 thrpt 25 7501407.130 ± 224844.340 ops/s +[info] JmhBench.bench_cS_fmc_lst_1 thrpt 25 9301.948 ± 312.874 ops/s +[info] JmhBench.bench_cS_fmc_mhs_0 thrpt 25 5517297.556 ± 113251.344 ops/s +[info] JmhBench.bench_cS_fmc_mhs_1 thrpt 25 2515.193 ± 13.965 ops/s +[info] JmhBench.bench_cS_fmc_muq_0 thrpt 25 1899649.245 ± 35990.877 ops/s +[info] JmhBench.bench_cS_fmc_muq_1 thrpt 25 2421.534 ± 
17.772 ops/s +[info] JmhBench.bench_cS_fmc_prq_0 thrpt 25 6326339.430 ± 63789.733 ops/s +[info] JmhBench.bench_cS_fmc_prq_1 thrpt 25 6136.747 ± 671.604 ops/s +[info] JmhBench.bench_cS_fmc_que_0 thrpt 25 5503013.635 ± 251275.430 ops/s +[info] JmhBench.bench_cS_fmc_que_1 thrpt 25 7203.193 ± 147.509 ops/s +[info] JmhBench.bench_cS_fmc_stm_0 thrpt 25 5472585.642 ± 388387.172 ops/s +[info] JmhBench.bench_cS_fmc_stm_1 thrpt 25 6386.356 ± 157.946 ops/s +[info] JmhBench.bench_cS_fmc_trs_0 thrpt 25 5265384.984 ± 84795.494 ops/s +[info] JmhBench.bench_cS_fmc_trs_1 thrpt 25 1272.612 ± 11.984 ops/s +[info] JmhBench.bench_cS_fmc_vec_0 thrpt 25 6851462.782 ± 52096.647 ops/s +[info] JmhBench.bench_cS_fmc_vec_1 thrpt 25 10994.253 ± 317.945 ops/s +[info] JmhBench.bench_cS_fmc_wra_0 thrpt 25 6661645.674 ± 74676.590 ops/s +[info] JmhBench.bench_cS_fmc_wra_1 thrpt 25 17783.760 ± 433.679 ops/s +[info] JmhBench.bench_cS_htrg_arb_0 thrpt 25 98786.541 ± 135.038 ops/s +[info] JmhBench.bench_cS_htrg_arb_1 thrpt 25 98.001 ± 0.163 ops/s +[info] JmhBench.bench_cS_htrg_arr_0 thrpt 25 100566.283 ± 111.351 ops/s +[info] JmhBench.bench_cS_htrg_arr_1 thrpt 25 99.116 ± 0.146 ops/s +[info] JmhBench.bench_cS_htrg_ars_0 thrpt 25 70824.755 ± 159.884 ops/s +[info] JmhBench.bench_cS_htrg_ars_1 thrpt 25 83.161 ± 9.309 ops/s +[info] JmhBench.bench_cS_htrg_ast_0 thrpt 25 70497.771 ± 137.953 ops/s +[info] JmhBench.bench_cS_htrg_ast_1 thrpt 25 70.669 ± 0.111 ops/s +[info] JmhBench.bench_cS_htrg_ils_0 thrpt 25 93979.601 ± 143.990 ops/s +[info] JmhBench.bench_cS_htrg_ils_1 thrpt 25 90.501 ± 0.227 ops/s +[info] JmhBench.bench_cS_htrg_ish_0 thrpt 25 69330.554 ± 108.192 ops/s +[info] JmhBench.bench_cS_htrg_ish_1 thrpt 25 62.102 ± 0.265 ops/s +[info] JmhBench.bench_cS_htrg_lhs_0 thrpt 25 96535.109 ± 339.936 ops/s +[info] JmhBench.bench_cS_htrg_lhs_1 thrpt 25 93.221 ± 0.205 ops/s +[info] JmhBench.bench_cS_htrg_lst_0 thrpt 25 99009.318 ± 190.428 ops/s +[info] JmhBench.bench_cS_htrg_lst_1 thrpt 25 97.804 ± 0.203 ops/s 
+[info] JmhBench.bench_cS_htrg_mhs_0 thrpt 25 96569.398 ± 329.995 ops/s +[info] JmhBench.bench_cS_htrg_mhs_1 thrpt 25 90.747 ± 0.333 ops/s +[info] JmhBench.bench_cS_htrg_muq_0 thrpt 25 93247.558 ± 334.412 ops/s +[info] JmhBench.bench_cS_htrg_muq_1 thrpt 25 94.833 ± 0.283 ops/s +[info] JmhBench.bench_cS_htrg_prq_0 thrpt 25 98817.647 ± 226.278 ops/s +[info] JmhBench.bench_cS_htrg_prq_1 thrpt 25 70.441 ± 0.178 ops/s +[info] JmhBench.bench_cS_htrg_que_0 thrpt 25 97188.745 ± 169.364 ops/s +[info] JmhBench.bench_cS_htrg_que_1 thrpt 25 96.327 ± 0.291 ops/s +[info] JmhBench.bench_cS_htrg_stm_0 thrpt 25 96162.631 ± 162.971 ops/s +[info] JmhBench.bench_cS_htrg_stm_1 thrpt 25 94.593 ± 0.183 ops/s +[info] JmhBench.bench_cS_htrg_trs_0 thrpt 25 68623.297 ± 153.165 ops/s +[info] JmhBench.bench_cS_htrg_trs_1 thrpt 25 62.935 ± 0.265 ops/s +[info] JmhBench.bench_cS_htrg_vec_0 thrpt 25 98793.504 ± 237.187 ops/s +[info] JmhBench.bench_cS_htrg_vec_1 thrpt 25 97.576 ± 0.231 ops/s +[info] JmhBench.bench_cS_htrg_wra_0 thrpt 25 98854.153 ± 239.193 ops/s +[info] JmhBench.bench_cS_htrg_wra_1 thrpt 25 97.760 ± 0.233 ops/s +[info] JmhBench.bench_cS_mdtc_arb_0 thrpt 25 1918033.395 ± 16232.747 ops/s +[info] JmhBench.bench_cS_mdtc_arb_1 thrpt 25 1617.014 ± 148.727 ops/s +[info] JmhBench.bench_cS_mdtc_arr_0 thrpt 25 3952629.619 ± 25689.408 ops/s +[info] JmhBench.bench_cS_mdtc_arr_1 thrpt 25 3402684.776 ± 284371.307 ops/s +[info] JmhBench.bench_cS_mdtc_ars_0 thrpt 25 1638242.587 ± 14473.305 ops/s +[info] JmhBench.bench_cS_mdtc_ars_1 thrpt 25 1674.373 ± 103.550 ops/s +[info] JmhBench.bench_cS_mdtc_ast_0 thrpt 25 1325591.166 ± 23723.433 ops/s +[info] JmhBench.bench_cS_mdtc_ast_1 thrpt 25 1675.501 ± 116.342 ops/s +[info] JmhBench.bench_cS_mdtc_lst_0 thrpt 25 2161734.107 ± 109210.632 ops/s +[info] JmhBench.bench_cS_mdtc_lst_1 thrpt 25 1763.655 ± 175.337 ops/s +[info] JmhBench.bench_cS_mdtc_muq_0 thrpt 25 929318.339 ± 31646.687 ops/s +[info] JmhBench.bench_cS_mdtc_muq_1 thrpt 25 1139.156 ± 90.269 ops/s 
+[info] JmhBench.bench_cS_mdtc_que_0 thrpt 25 1327489.572 ± 14055.324 ops/s +[info] JmhBench.bench_cS_mdtc_que_1 thrpt 25 1394.055 ± 52.289 ops/s +[info] JmhBench.bench_cS_mdtc_stm_0 thrpt 25 989690.937 ± 25041.440 ops/s +[info] JmhBench.bench_cS_mdtc_stm_1 thrpt 25 928921.110 ± 18259.866 ops/s +[info] JmhBench.bench_cS_mdtc_vec_0 thrpt 25 2021845.500 ± 133044.349 ops/s +[info] JmhBench.bench_cS_mdtc_vec_1 thrpt 25 1752.712 ± 147.928 ops/s +[info] JmhBench.bench_cS_mdtc_wra_0 thrpt 25 1864950.656 ± 83548.897 ops/s +[info] JmhBench.bench_cS_mdtc_wra_1 thrpt 25 1933.967 ± 33.820 ops/s +[info] JmhBench.bench_cS_nbr_arb_0 thrpt 25 15835328.428 ± 122519.272 ops/s +[info] JmhBench.bench_cS_nbr_arb_1 thrpt 25 16840.766 ± 148.609 ops/s +[info] JmhBench.bench_cS_nbr_arr_0 thrpt 25 47695446.412 ± 182044.808 ops/s +[info] JmhBench.bench_cS_nbr_arr_1 thrpt 25 49504.973 ± 304.514 ops/s +[info] JmhBench.bench_cS_nbr_ars_0 thrpt 25 34856004.018 ± 401233.203 ops/s +[info] JmhBench.bench_cS_nbr_ars_1 thrpt 25 41092.593 ± 281.731 ops/s +[info] JmhBench.bench_cS_nbr_ast_0 thrpt 25 34757354.930 ± 535394.356 ops/s +[info] JmhBench.bench_cS_nbr_ast_1 thrpt 25 32360.450 ± 2172.594 ops/s +[info] JmhBench.bench_cS_nbr_ils_0 thrpt 25 10446844.257 ± 42499.180 ops/s +[info] JmhBench.bench_cS_nbr_ils_1 thrpt 25 18753.206 ± 5631.309 ops/s +[info] JmhBench.bench_cS_nbr_ish_0 thrpt 25 12678676.031 ± 1433593.642 ops/s +[info] JmhBench.bench_cS_nbr_ish_1 thrpt 25 6185.141 ± 193.784 ops/s +[info] JmhBench.bench_cS_nbr_lhs_0 thrpt 25 31868157.935 ± 355816.238 ops/s +[info] JmhBench.bench_cS_nbr_lhs_1 thrpt 25 34227.377 ± 800.219 ops/s +[info] JmhBench.bench_cS_nbr_lst_0 thrpt 25 29253945.968 ± 374855.998 ops/s +[info] JmhBench.bench_cS_nbr_lst_1 thrpt 25 35282.813 ± 492.016 ops/s +[info] JmhBench.bench_cS_nbr_mhs_0 thrpt 25 21835577.282 ± 1003688.165 ops/s +[info] JmhBench.bench_cS_nbr_mhs_1 thrpt 25 6272.351 ± 12.551 ops/s +[info] JmhBench.bench_cS_nbr_muq_0 thrpt 25 4114620.171 ± 31542.905 ops/s 
+[info] JmhBench.bench_cS_nbr_muq_1 thrpt 25 3541.869 ± 48.004 ops/s +[info] JmhBench.bench_cS_nbr_prq_0 thrpt 25 14977117.576 ± 48707.583 ops/s +[info] JmhBench.bench_cS_nbr_prq_1 thrpt 25 16714.493 ± 5012.246 ops/s +[info] JmhBench.bench_cS_nbr_que_0 thrpt 25 11073586.828 ± 41004.373 ops/s +[info] JmhBench.bench_cS_nbr_que_1 thrpt 25 18873.971 ± 6113.197 ops/s +[info] JmhBench.bench_cS_nbr_stm_0 thrpt 25 30003437.166 ± 204518.569 ops/s +[info] JmhBench.bench_cS_nbr_stm_1 thrpt 25 14211.397 ± 688.546 ops/s +[info] JmhBench.bench_cS_nbr_trs_0 thrpt 25 16120209.876 ± 69947.418 ops/s +[info] JmhBench.bench_cS_nbr_trs_1 thrpt 25 8927.765 ± 60.115 ops/s +[info] JmhBench.bench_cS_nbr_vec_0 thrpt 25 13156985.945 ± 62188.986 ops/s +[info] JmhBench.bench_cS_nbr_vec_1 thrpt 25 20392.272 ± 3067.769 ops/s +[info] JmhBench.bench_cS_nbr_wra_0 thrpt 25 16390141.475 ± 56090.429 ops/s +[info] JmhBench.bench_cS_nbr_wra_1 thrpt 25 17489.550 ± 47.421 ops/s +[info] JmhBench.bench_cpI_pfmc_arb_0 thrpt 25 27474.789 ± 928.921 ops/s +[info] JmhBench.bench_cpI_pfmc_arb_1 thrpt 25 7441.663 ± 93.625 ops/s +[info] JmhBench.bench_cpI_pfmc_arr_0 thrpt 25 28387.894 ± 596.492 ops/s +[info] JmhBench.bench_cpI_pfmc_arr_1 thrpt 25 5064.334 ± 50.797 ops/s +[info] JmhBench.bench_cpI_pfmc_ars_0 thrpt 25 28798.766 ± 335.791 ops/s +[info] JmhBench.bench_cpI_pfmc_ars_1 thrpt 25 7437.116 ± 108.032 ops/s +[info] JmhBench.bench_cpI_pfmc_ish_0 thrpt 25 23565.704 ± 517.381 ops/s +[info] JmhBench.bench_cpI_pfmc_ish_1 thrpt 25 3360.815 ± 27.573 ops/s +[info] JmhBench.bench_cpI_pfmc_mhs_0 thrpt 25 30631.073 ± 337.113 ops/s +[info] JmhBench.bench_cpI_pfmc_mhs_1 thrpt 25 3679.460 ± 44.876 ops/s +[info] JmhBench.bench_cpI_pfmc_vec_0 thrpt 25 24037.121 ± 400.303 ops/s +[info] JmhBench.bench_cpI_pfmc_vec_1 thrpt 25 6158.006 ± 137.476 ops/s +[info] JmhBench.bench_cpI_pfmc_wra_0 thrpt 25 28461.197 ± 406.506 ops/s +[info] JmhBench.bench_cpI_pfmc_wra_1 thrpt 25 5437.056 ± 62.127 ops/s +[info] JmhBench.bench_cpI_psum_arb_0 
thrpt 25 72361.424 ± 4874.360 ops/s +[info] JmhBench.bench_cpI_psum_arb_1 thrpt 25 24580.891 ± 188.592 ops/s +[info] JmhBench.bench_cpI_psum_arr_0 thrpt 25 81295.376 ± 4901.317 ops/s +[info] JmhBench.bench_cpI_psum_arr_1 thrpt 25 9877.067 ± 949.170 ops/s +[info] JmhBench.bench_cpI_psum_ars_0 thrpt 25 73484.665 ± 4937.921 ops/s +[info] JmhBench.bench_cpI_psum_ars_1 thrpt 25 24514.076 ± 192.678 ops/s +[info] JmhBench.bench_cpI_psum_ish_0 thrpt 25 59907.362 ± 3036.537 ops/s +[info] JmhBench.bench_cpI_psum_ish_1 thrpt 25 12840.071 ± 125.814 ops/s +[info] JmhBench.bench_cpI_psum_mhs_0 thrpt 25 86150.260 ± 888.758 ops/s +[info] JmhBench.bench_cpI_psum_mhs_1 thrpt 25 12432.879 ± 107.465 ops/s +[info] JmhBench.bench_cpI_psum_vec_0 thrpt 25 67410.471 ± 503.168 ops/s +[info] JmhBench.bench_cpI_psum_vec_1 thrpt 25 19689.700 ± 224.288 ops/s +[info] JmhBench.bench_cpI_psum_wra_0 thrpt 25 81936.702 ± 6116.035 ops/s +[info] JmhBench.bench_cpI_psum_wra_1 thrpt 25 12562.928 ± 602.681 ops/s +[info] JmhBench.bench_cpI_ptrig_arb_0 thrpt 25 27500.407 ± 451.591 ops/s +[info] JmhBench.bench_cpI_ptrig_arb_1 thrpt 25 636.484 ± 4.737 ops/s +[info] JmhBench.bench_cpI_ptrig_arr_0 thrpt 25 27522.840 ± 278.360 ops/s +[info] JmhBench.bench_cpI_ptrig_arr_1 thrpt 25 580.587 ± 12.258 ops/s +[info] JmhBench.bench_cpI_ptrig_ars_0 thrpt 25 27760.222 ± 383.837 ops/s +[info] JmhBench.bench_cpI_ptrig_ars_1 thrpt 25 626.056 ± 13.349 ops/s +[info] JmhBench.bench_cpI_ptrig_ish_0 thrpt 25 16795.172 ± 199.894 ops/s +[info] JmhBench.bench_cpI_ptrig_ish_1 thrpt 25 495.891 ± 11.044 ops/s +[info] JmhBench.bench_cpI_ptrig_mhs_0 thrpt 25 26673.936 ± 630.916 ops/s +[info] JmhBench.bench_cpI_ptrig_mhs_1 thrpt 25 424.598 ± 15.818 ops/s +[info] JmhBench.bench_cpI_ptrig_vec_0 thrpt 25 19853.009 ± 430.607 ops/s +[info] JmhBench.bench_cpI_ptrig_vec_1 thrpt 25 406.065 ± 6.945 ops/s +[info] JmhBench.bench_cpI_ptrig_wra_0 thrpt 25 27526.423 ± 304.224 ops/s +[info] JmhBench.bench_cpI_ptrig_wra_1 thrpt 25 590.973 ± 12.122 
ops/s +[info] JmhBench.bench_cpS_pfmc_arb_0 thrpt 25 28842.541 ± 242.274 ops/s +[info] JmhBench.bench_cpS_pfmc_arb_1 thrpt 25 7437.332 ± 102.398 ops/s +[info] JmhBench.bench_cpS_pfmc_arr_0 thrpt 25 28444.328 ± 788.916 ops/s +[info] JmhBench.bench_cpS_pfmc_arr_1 thrpt 25 7480.643 ± 100.594 ops/s +[info] JmhBench.bench_cpS_pfmc_ars_0 thrpt 25 28489.525 ± 464.335 ops/s +[info] JmhBench.bench_cpS_pfmc_ars_1 thrpt 25 7509.309 ± 111.292 ops/s +[info] JmhBench.bench_cpS_pfmc_ish_0 thrpt 25 24479.975 ± 278.588 ops/s +[info] JmhBench.bench_cpS_pfmc_ish_1 thrpt 25 3249.879 ± 72.414 ops/s +[info] JmhBench.bench_cpS_pfmc_mhs_0 thrpt 25 27431.881 ± 346.329 ops/s +[info] JmhBench.bench_cpS_pfmc_mhs_1 thrpt 25 3515.211 ± 32.526 ops/s +[info] JmhBench.bench_cpS_pfmc_vec_0 thrpt 25 24390.048 ± 299.352 ops/s +[info] JmhBench.bench_cpS_pfmc_vec_1 thrpt 25 5880.720 ± 40.165 ops/s +[info] JmhBench.bench_cpS_pfmc_wra_0 thrpt 25 27932.560 ± 745.187 ops/s +[info] JmhBench.bench_cpS_pfmc_wra_1 thrpt 25 7371.214 ± 122.236 ops/s +[info] JmhBench.bench_cpS_phtrg_arb_0 thrpt 25 26302.214 ± 350.122 ops/s +[info] JmhBench.bench_cpS_phtrg_arb_1 thrpt 25 461.723 ± 9.907 ops/s +[info] JmhBench.bench_cpS_phtrg_arr_0 thrpt 25 26334.485 ± 294.864 ops/s +[info] JmhBench.bench_cpS_phtrg_arr_1 thrpt 25 471.983 ± 4.097 ops/s +[info] JmhBench.bench_cpS_phtrg_ars_0 thrpt 25 26745.117 ± 302.547 ops/s +[info] JmhBench.bench_cpS_phtrg_ars_1 thrpt 25 460.382 ± 10.151 ops/s +[info] JmhBench.bench_cpS_phtrg_ish_0 thrpt 25 16794.377 ± 217.144 ops/s +[info] JmhBench.bench_cpS_phtrg_ish_1 thrpt 25 381.766 ± 7.441 ops/s +[info] JmhBench.bench_cpS_phtrg_mhs_0 thrpt 25 24664.868 ± 234.874 ops/s +[info] JmhBench.bench_cpS_phtrg_mhs_1 thrpt 25 346.646 ± 6.224 ops/s +[info] JmhBench.bench_cpS_phtrg_vec_0 thrpt 25 19515.505 ± 291.700 ops/s +[info] JmhBench.bench_cpS_phtrg_vec_1 thrpt 25 329.508 ± 4.446 ops/s +[info] JmhBench.bench_cpS_phtrg_wra_0 thrpt 25 26324.009 ± 260.212 ops/s +[info] JmhBench.bench_cpS_phtrg_wra_1 
thrpt 25 457.488 ± 11.205 ops/s +[info] JmhBench.bench_cpS_pnbr_arb_0 thrpt 25 73769.996 ± 4888.976 ops/s +[info] JmhBench.bench_cpS_pnbr_arb_1 thrpt 25 26655.950 ± 267.853 ops/s +[info] JmhBench.bench_cpS_pnbr_arr_0 thrpt 25 71213.033 ± 4539.260 ops/s +[info] JmhBench.bench_cpS_pnbr_arr_1 thrpt 25 26608.171 ± 238.450 ops/s +[info] JmhBench.bench_cpS_pnbr_ars_0 thrpt 25 78247.370 ± 9646.793 ops/s +[info] JmhBench.bench_cpS_pnbr_ars_1 thrpt 25 26383.234 ± 418.183 ops/s +[info] JmhBench.bench_cpS_pnbr_ish_0 thrpt 25 72540.054 ± 2073.362 ops/s +[info] JmhBench.bench_cpS_pnbr_ish_1 thrpt 25 10859.745 ± 100.505 ops/s +[info] JmhBench.bench_cpS_pnbr_mhs_0 thrpt 25 77753.110 ± 2422.987 ops/s +[info] JmhBench.bench_cpS_pnbr_mhs_1 thrpt 25 10940.002 ± 87.312 ops/s +[info] JmhBench.bench_cpS_pnbr_vec_0 thrpt 25 64658.143 ± 2006.249 ops/s +[info] JmhBench.bench_cpS_pnbr_vec_1 thrpt 25 20204.269 ± 242.223 ops/s +[info] JmhBench.bench_cpS_pnbr_wra_0 thrpt 25 73522.170 ± 7333.002 ops/s +[info] JmhBench.bench_cpS_pnbr_wra_1 thrpt 25 26573.617 ± 275.909 ops/s +[info] JmhBench.bench_iI_fmc_arb_0 thrpt 25 11534951.751 ± 1691165.189 ops/s +[info] JmhBench.bench_iI_fmc_arb_1 thrpt 25 23810.169 ± 96.643 ops/s +[info] JmhBench.bench_iI_fmc_arr_0 thrpt 25 10757732.243 ± 1081713.666 ops/s +[info] JmhBench.bench_iI_fmc_arr_1 thrpt 25 15286.908 ± 436.672 ops/s +[info] JmhBench.bench_iI_fmc_ars_0 thrpt 25 14166687.269 ± 1566994.869 ops/s +[info] JmhBench.bench_iI_fmc_ars_1 thrpt 25 23799.914 ± 175.777 ops/s +[info] JmhBench.bench_iI_fmc_ast_0 thrpt 25 12292010.377 ± 176433.123 ops/s +[info] JmhBench.bench_iI_fmc_ast_1 thrpt 25 25241.034 ± 120.206 ops/s +[info] JmhBench.bench_iI_fmc_ils_0 thrpt 25 13282888.820 ± 1987503.472 ops/s +[info] JmhBench.bench_iI_fmc_ils_1 thrpt 25 19804.166 ± 254.272 ops/s +[info] JmhBench.bench_iI_fmc_ish_0 thrpt 25 9527444.984 ± 1365718.073 ops/s +[info] JmhBench.bench_iI_fmc_ish_1 thrpt 25 9854.394 ± 63.613 ops/s +[info] JmhBench.bench_iI_fmc_lhs_0 thrpt 25 
13602398.626 ± 83211.972 ops/s +[info] JmhBench.bench_iI_fmc_lhs_1 thrpt 25 24091.137 ± 702.120 ops/s +[info] JmhBench.bench_iI_fmc_lst_0 thrpt 25 11879620.676 ± 2484773.765 ops/s +[info] JmhBench.bench_iI_fmc_lst_1 thrpt 25 19869.918 ± 309.872 ops/s +[info] JmhBench.bench_iI_fmc_mhs_0 thrpt 25 7592696.142 ± 89270.023 ops/s +[info] JmhBench.bench_iI_fmc_mhs_1 thrpt 25 10258.397 ± 167.902 ops/s +[info] JmhBench.bench_iI_fmc_muq_0 thrpt 25 7592332.150 ± 62151.207 ops/s +[info] JmhBench.bench_iI_fmc_muq_1 thrpt 25 18870.776 ± 230.705 ops/s +[info] JmhBench.bench_iI_fmc_prq_0 thrpt 25 12541183.516 ± 1500910.168 ops/s +[info] JmhBench.bench_iI_fmc_prq_1 thrpt 25 22868.330 ± 128.832 ops/s +[info] JmhBench.bench_iI_fmc_que_0 thrpt 25 8906589.116 ± 1870066.573 ops/s +[info] JmhBench.bench_iI_fmc_que_1 thrpt 25 19476.309 ± 495.972 ops/s +[info] JmhBench.bench_iI_fmc_stm_0 thrpt 25 4046667.237 ± 192062.337 ops/s +[info] JmhBench.bench_iI_fmc_stm_1 thrpt 25 4574.786 ± 302.886 ops/s +[info] JmhBench.bench_iI_fmc_trs_0 thrpt 25 8992324.317 ± 318613.523 ops/s +[info] JmhBench.bench_iI_fmc_trs_1 thrpt 25 13485.785 ± 211.286 ops/s +[info] JmhBench.bench_iI_fmc_vec_0 thrpt 25 11822416.084 ± 142156.960 ops/s +[info] JmhBench.bench_iI_fmc_vec_1 thrpt 25 19049.985 ± 180.215 ops/s +[info] JmhBench.bench_iI_fmc_wra_0 thrpt 25 9234532.837 ± 56329.639 ops/s +[info] JmhBench.bench_iI_fmc_wra_1 thrpt 25 14532.597 ± 495.601 ops/s +[info] JmhBench.bench_iI_mdtc_arb_0 thrpt 25 24564626.307 ± 3669838.680 ops/s +[info] JmhBench.bench_iI_mdtc_arb_1 thrpt 25 5108174.772 ± 678000.865 ops/s +[info] JmhBench.bench_iI_mdtc_arr_0 thrpt 25 26535121.610 ± 6675643.142 ops/s +[info] JmhBench.bench_iI_mdtc_arr_1 thrpt 25 6362301.204 ± 667303.594 ops/s +[info] JmhBench.bench_iI_mdtc_ars_0 thrpt 25 22085306.151 ± 4620817.939 ops/s +[info] JmhBench.bench_iI_mdtc_ars_1 thrpt 25 4685615.766 ± 17669.801 ops/s +[info] JmhBench.bench_iI_mdtc_ast_0 thrpt 25 26824525.002 ± 4209095.277 ops/s +[info] 
JmhBench.bench_iI_mdtc_ast_1 thrpt 25 6032697.890 ± 1056453.803 ops/s +[info] JmhBench.bench_iI_mdtc_lst_0 thrpt 25 27610068.280 ± 622136.640 ops/s +[info] JmhBench.bench_iI_mdtc_lst_1 thrpt 25 5461025.518 ± 41410.786 ops/s +[info] JmhBench.bench_iI_mdtc_muq_0 thrpt 25 13497138.952 ± 511813.331 ops/s +[info] JmhBench.bench_iI_mdtc_muq_1 thrpt 25 4042199.341 ± 1148290.848 ops/s +[info] JmhBench.bench_iI_mdtc_que_0 thrpt 25 22956764.241 ± 3036473.817 ops/s +[info] JmhBench.bench_iI_mdtc_que_1 thrpt 25 5744120.080 ± 575906.527 ops/s +[info] JmhBench.bench_iI_mdtc_stm_0 thrpt 25 3983205.096 ± 203586.052 ops/s +[info] JmhBench.bench_iI_mdtc_stm_1 thrpt 25 1952832.766 ± 80784.271 ops/s +[info] JmhBench.bench_iI_mdtc_vec_0 thrpt 25 26238397.353 ± 559877.283 ops/s +[info] JmhBench.bench_iI_mdtc_vec_1 thrpt 25 6202379.758 ± 981088.096 ops/s +[info] JmhBench.bench_iI_mdtc_wra_0 thrpt 25 21628417.878 ± 3218213.725 ops/s +[info] JmhBench.bench_iI_mdtc_wra_1 thrpt 25 3989870.457 ± 53216.895 ops/s +[info] JmhBench.bench_iI_sum_arb_0 thrpt 25 13677150.793 ± 196430.726 ops/s +[info] JmhBench.bench_iI_sum_arb_1 thrpt 25 16594.802 ± 1428.913 ops/s +[info] JmhBench.bench_iI_sum_arr_0 thrpt 25 11018038.688 ± 253894.491 ops/s +[info] JmhBench.bench_iI_sum_arr_1 thrpt 25 11801.147 ± 650.632 ops/s +[info] JmhBench.bench_iI_sum_ars_0 thrpt 25 15848085.664 ± 367685.730 ops/s +[info] JmhBench.bench_iI_sum_ars_1 thrpt 25 18191.993 ± 519.274 ops/s +[info] JmhBench.bench_iI_sum_ast_0 thrpt 25 17845530.592 ± 404346.784 ops/s +[info] JmhBench.bench_iI_sum_ast_1 thrpt 25 19225.305 ± 353.326 ops/s +[info] JmhBench.bench_iI_sum_ils_0 thrpt 25 18507893.432 ± 244915.056 ops/s +[info] JmhBench.bench_iI_sum_ils_1 thrpt 25 25558.552 ± 679.197 ops/s +[info] JmhBench.bench_iI_sum_ish_0 thrpt 25 9720641.170 ± 76384.111 ops/s +[info] JmhBench.bench_iI_sum_ish_1 thrpt 25 9909.417 ± 115.705 ops/s +[info] JmhBench.bench_iI_sum_lhs_0 thrpt 25 17819380.379 ± 415435.915 ops/s +[info] JmhBench.bench_iI_sum_lhs_1 
thrpt 25 20293.239 ± 574.528 ops/s +[info] JmhBench.bench_iI_sum_lst_0 thrpt 25 17618830.518 ± 247702.793 ops/s +[info] JmhBench.bench_iI_sum_lst_1 thrpt 25 23871.318 ± 821.518 ops/s +[info] JmhBench.bench_iI_sum_mhs_0 thrpt 25 7920199.338 ± 25073.016 ops/s +[info] JmhBench.bench_iI_sum_mhs_1 thrpt 25 9793.600 ± 565.252 ops/s +[info] JmhBench.bench_iI_sum_muq_0 thrpt 25 9990945.720 ± 31626.968 ops/s +[info] JmhBench.bench_iI_sum_muq_1 thrpt 25 12188.211 ± 537.108 ops/s +[info] JmhBench.bench_iI_sum_prq_0 thrpt 25 15093045.013 ± 316551.848 ops/s +[info] JmhBench.bench_iI_sum_prq_1 thrpt 25 18429.280 ± 240.042 ops/s +[info] JmhBench.bench_iI_sum_que_0 thrpt 25 16518172.761 ± 133367.414 ops/s +[info] JmhBench.bench_iI_sum_que_1 thrpt 25 23915.095 ± 480.386 ops/s +[info] JmhBench.bench_iI_sum_stm_0 thrpt 25 3960336.238 ± 213043.035 ops/s +[info] JmhBench.bench_iI_sum_stm_1 thrpt 25 4173.550 ± 349.392 ops/s +[info] JmhBench.bench_iI_sum_trs_0 thrpt 25 9199624.201 ± 56259.658 ops/s +[info] JmhBench.bench_iI_sum_trs_1 thrpt 25 13869.328 ± 530.911 ops/s +[info] JmhBench.bench_iI_sum_vec_0 thrpt 25 14795372.228 ± 324576.889 ops/s +[info] JmhBench.bench_iI_sum_vec_1 thrpt 25 17685.259 ± 588.780 ops/s +[info] JmhBench.bench_iI_sum_wra_0 thrpt 25 10885962.925 ± 557809.799 ops/s +[info] JmhBench.bench_iI_sum_wra_1 thrpt 25 12222.309 ± 300.230 ops/s +[info] JmhBench.bench_iI_trig_arb_0 thrpt 25 127425.280 ± 191.414 ops/s +[info] JmhBench.bench_iI_trig_arb_1 thrpt 25 127.634 ± 0.991 ops/s +[info] JmhBench.bench_iI_trig_arr_0 thrpt 25 127512.032 ± 274.842 ops/s +[info] JmhBench.bench_iI_trig_arr_1 thrpt 25 127.698 ± 0.193 ops/s +[info] JmhBench.bench_iI_trig_ars_0 thrpt 25 127436.172 ± 238.600 ops/s +[info] JmhBench.bench_iI_trig_ars_1 thrpt 25 127.869 ± 0.295 ops/s +[info] JmhBench.bench_iI_trig_ast_0 thrpt 25 127494.683 ± 602.866 ops/s +[info] JmhBench.bench_iI_trig_ast_1 thrpt 25 127.612 ± 0.632 ops/s +[info] JmhBench.bench_iI_trig_ils_0 thrpt 25 127113.022 ± 332.672 ops/s 
+[info] JmhBench.bench_iI_trig_ils_1 thrpt 25 127.901 ± 0.286 ops/s +[info] JmhBench.bench_iI_trig_ish_0 thrpt 25 127092.736 ± 457.161 ops/s +[info] JmhBench.bench_iI_trig_ish_1 thrpt 25 126.436 ± 0.534 ops/s +[info] JmhBench.bench_iI_trig_lhs_0 thrpt 25 127514.664 ± 231.652 ops/s +[info] JmhBench.bench_iI_trig_lhs_1 thrpt 25 128.273 ± 0.254 ops/s +[info] JmhBench.bench_iI_trig_lst_0 thrpt 25 127022.490 ± 248.515 ops/s +[info] JmhBench.bench_iI_trig_lst_1 thrpt 25 128.115 ± 0.293 ops/s +[info] JmhBench.bench_iI_trig_mhs_0 thrpt 25 127550.698 ± 198.539 ops/s +[info] JmhBench.bench_iI_trig_mhs_1 thrpt 25 126.677 ± 0.344 ops/s +[info] JmhBench.bench_iI_trig_muq_0 thrpt 25 126992.882 ± 183.521 ops/s +[info] JmhBench.bench_iI_trig_muq_1 thrpt 25 128.114 ± 0.314 ops/s +[info] JmhBench.bench_iI_trig_prq_0 thrpt 25 126839.329 ± 193.157 ops/s +[info] JmhBench.bench_iI_trig_prq_1 thrpt 25 127.293 ± 0.463 ops/s +[info] JmhBench.bench_iI_trig_que_0 thrpt 25 126802.756 ± 375.210 ops/s +[info] JmhBench.bench_iI_trig_que_1 thrpt 25 128.106 ± 0.397 ops/s +[info] JmhBench.bench_iI_trig_stm_0 thrpt 25 124764.329 ± 430.653 ops/s +[info] JmhBench.bench_iI_trig_stm_1 thrpt 25 124.498 ± 0.485 ops/s +[info] JmhBench.bench_iI_trig_trs_0 thrpt 25 126340.545 ± 237.574 ops/s +[info] JmhBench.bench_iI_trig_trs_1 thrpt 25 126.564 ± 0.356 ops/s +[info] JmhBench.bench_iI_trig_vec_0 thrpt 25 127474.814 ± 164.684 ops/s +[info] JmhBench.bench_iI_trig_vec_1 thrpt 25 128.064 ± 0.313 ops/s +[info] JmhBench.bench_iI_trig_wra_0 thrpt 25 127331.453 ± 261.737 ops/s +[info] JmhBench.bench_iI_trig_wra_1 thrpt 25 128.023 ± 0.172 ops/s +[info] JmhBench.bench_iS_fmc_arb_0 thrpt 25 20660242.351 ± 209369.661 ops/s +[info] JmhBench.bench_iS_fmc_arb_1 thrpt 25 27552.048 ± 172.476 ops/s +[info] JmhBench.bench_iS_fmc_arr_0 thrpt 25 21065476.479 ± 205039.828 ops/s +[info] JmhBench.bench_iS_fmc_arr_1 thrpt 25 29128.579 ± 250.629 ops/s +[info] JmhBench.bench_iS_fmc_ars_0 thrpt 25 20695026.482 ± 88802.378 ops/s +[info] 
JmhBench.bench_iS_fmc_ars_1 thrpt 25 27806.607 ± 178.834 ops/s +[info] JmhBench.bench_iS_fmc_ast_0 thrpt 25 25600313.157 ± 317157.005 ops/s +[info] JmhBench.bench_iS_fmc_ast_1 thrpt 25 31883.764 ± 250.718 ops/s +[info] JmhBench.bench_iS_fmc_ils_0 thrpt 25 14362194.610 ± 173685.590 ops/s +[info] JmhBench.bench_iS_fmc_ils_1 thrpt 25 27422.246 ± 475.305 ops/s +[info] JmhBench.bench_iS_fmc_ish_0 thrpt 25 11106914.870 ± 304024.165 ops/s +[info] JmhBench.bench_iS_fmc_ish_1 thrpt 25 6266.457 ± 50.467 ops/s +[info] JmhBench.bench_iS_fmc_lhs_0 thrpt 25 22715287.169 ± 149617.853 ops/s +[info] JmhBench.bench_iS_fmc_lhs_1 thrpt 25 28972.273 ± 328.804 ops/s +[info] JmhBench.bench_iS_fmc_lst_0 thrpt 25 21752765.860 ± 238227.475 ops/s +[info] JmhBench.bench_iS_fmc_lst_1 thrpt 25 27163.131 ± 560.292 ops/s +[info] JmhBench.bench_iS_fmc_mhs_0 thrpt 25 13668175.591 ± 75209.492 ops/s +[info] JmhBench.bench_iS_fmc_mhs_1 thrpt 25 5765.118 ± 29.925 ops/s +[info] JmhBench.bench_iS_fmc_muq_0 thrpt 25 9638550.537 ± 157743.693 ops/s +[info] JmhBench.bench_iS_fmc_muq_1 thrpt 25 24535.554 ± 319.728 ops/s +[info] JmhBench.bench_iS_fmc_prq_0 thrpt 25 21586397.825 ± 513866.266 ops/s +[info] JmhBench.bench_iS_fmc_prq_1 thrpt 25 20012.133 ± 438.172 ops/s +[info] JmhBench.bench_iS_fmc_que_0 thrpt 25 19349313.994 ± 291925.270 ops/s +[info] JmhBench.bench_iS_fmc_que_1 thrpt 25 27455.560 ± 249.001 ops/s +[info] JmhBench.bench_iS_fmc_stm_0 thrpt 25 3967962.807 ± 230987.833 ops/s +[info] JmhBench.bench_iS_fmc_stm_1 thrpt 25 4584.761 ± 288.495 ops/s +[info] JmhBench.bench_iS_fmc_trs_0 thrpt 25 10500586.619 ± 543350.914 ops/s +[info] JmhBench.bench_iS_fmc_trs_1 thrpt 25 7758.939 ± 260.818 ops/s +[info] JmhBench.bench_iS_fmc_vec_0 thrpt 25 16202222.266 ± 1185179.036 ops/s +[info] JmhBench.bench_iS_fmc_vec_1 thrpt 25 20328.860 ± 357.803 ops/s +[info] JmhBench.bench_iS_fmc_wra_0 thrpt 25 22170885.258 ± 82756.344 ops/s +[info] JmhBench.bench_iS_fmc_wra_1 thrpt 25 28812.002 ± 257.772 ops/s +[info] 
JmhBench.bench_iS_htrg_arb_0 thrpt 25 99861.730 ± 209.117 ops/s +[info] JmhBench.bench_iS_htrg_arb_1 thrpt 25 98.417 ± 0.171 ops/s +[info] JmhBench.bench_iS_htrg_arr_0 thrpt 25 99625.567 ± 163.801 ops/s +[info] JmhBench.bench_iS_htrg_arr_1 thrpt 25 98.296 ± 0.210 ops/s +[info] JmhBench.bench_iS_htrg_ars_0 thrpt 25 99879.643 ± 156.190 ops/s +[info] JmhBench.bench_iS_htrg_ars_1 thrpt 25 98.363 ± 0.183 ops/s +[info] JmhBench.bench_iS_htrg_ast_0 thrpt 25 99380.973 ± 336.195 ops/s +[info] JmhBench.bench_iS_htrg_ast_1 thrpt 25 98.387 ± 0.198 ops/s +[info] JmhBench.bench_iS_htrg_ils_0 thrpt 25 99964.680 ± 174.309 ops/s +[info] JmhBench.bench_iS_htrg_ils_1 thrpt 25 98.294 ± 0.227 ops/s +[info] JmhBench.bench_iS_htrg_ish_0 thrpt 25 99433.408 ± 247.431 ops/s +[info] JmhBench.bench_iS_htrg_ish_1 thrpt 25 97.311 ± 0.150 ops/s +[info] JmhBench.bench_iS_htrg_lhs_0 thrpt 25 99265.630 ± 291.685 ops/s +[info] JmhBench.bench_iS_htrg_lhs_1 thrpt 25 98.447 ± 0.194 ops/s +[info] JmhBench.bench_iS_htrg_lst_0 thrpt 25 99902.619 ± 186.008 ops/s +[info] JmhBench.bench_iS_htrg_lst_1 thrpt 25 98.498 ± 0.209 ops/s +[info] JmhBench.bench_iS_htrg_mhs_0 thrpt 25 99003.158 ± 187.515 ops/s +[info] JmhBench.bench_iS_htrg_mhs_1 thrpt 25 96.822 ± 0.413 ops/s +[info] JmhBench.bench_iS_htrg_muq_0 thrpt 25 100012.595 ± 276.322 ops/s +[info] JmhBench.bench_iS_htrg_muq_1 thrpt 25 98.282 ± 0.258 ops/s +[info] JmhBench.bench_iS_htrg_prq_0 thrpt 25 99909.274 ± 269.579 ops/s +[info] JmhBench.bench_iS_htrg_prq_1 thrpt 25 97.929 ± 0.434 ops/s +[info] JmhBench.bench_iS_htrg_que_0 thrpt 25 99927.315 ± 151.573 ops/s +[info] JmhBench.bench_iS_htrg_que_1 thrpt 25 98.434 ± 0.299 ops/s +[info] JmhBench.bench_iS_htrg_stm_0 thrpt 25 98035.244 ± 198.742 ops/s +[info] JmhBench.bench_iS_htrg_stm_1 thrpt 25 96.635 ± 0.265 ops/s +[info] JmhBench.bench_iS_htrg_trs_0 thrpt 25 99717.022 ± 174.509 ops/s +[info] JmhBench.bench_iS_htrg_trs_1 thrpt 25 97.415 ± 0.245 ops/s +[info] JmhBench.bench_iS_htrg_vec_0 thrpt 25 99966.597 ± 
221.390 ops/s +[info] JmhBench.bench_iS_htrg_vec_1 thrpt 25 98.199 ± 0.172 ops/s +[info] JmhBench.bench_iS_htrg_wra_0 thrpt 25 99874.236 ± 179.219 ops/s +[info] JmhBench.bench_iS_htrg_wra_1 thrpt 25 98.288 ± 0.196 ops/s +[info] JmhBench.bench_iS_mdtc_arb_0 thrpt 25 2097899.982 ± 23027.980 ops/s +[info] JmhBench.bench_iS_mdtc_arb_1 thrpt 25 1887060.512 ± 26123.879 ops/s +[info] JmhBench.bench_iS_mdtc_arr_0 thrpt 25 2082275.844 ± 34790.548 ops/s +[info] JmhBench.bench_iS_mdtc_arr_1 thrpt 25 1845126.936 ± 20293.322 ops/s +[info] JmhBench.bench_iS_mdtc_ars_0 thrpt 25 2009995.771 ± 76910.203 ops/s +[info] JmhBench.bench_iS_mdtc_ars_1 thrpt 25 1870000.666 ± 40742.596 ops/s +[info] JmhBench.bench_iS_mdtc_ast_0 thrpt 25 2178819.994 ± 221518.015 ops/s +[info] JmhBench.bench_iS_mdtc_ast_1 thrpt 25 2123261.170 ± 154628.266 ops/s +[info] JmhBench.bench_iS_mdtc_lst_0 thrpt 25 2328914.058 ± 32074.037 ops/s +[info] JmhBench.bench_iS_mdtc_lst_1 thrpt 25 2041954.017 ± 42234.460 ops/s +[info] JmhBench.bench_iS_mdtc_muq_0 thrpt 25 2100724.686 ± 25639.833 ops/s +[info] JmhBench.bench_iS_mdtc_muq_1 thrpt 25 1911625.218 ± 23276.435 ops/s +[info] JmhBench.bench_iS_mdtc_que_0 thrpt 25 2329239.766 ± 30110.120 ops/s +[info] JmhBench.bench_iS_mdtc_que_1 thrpt 25 2006283.293 ± 45267.124 ops/s +[info] JmhBench.bench_iS_mdtc_stm_0 thrpt 25 1639832.628 ± 33564.535 ops/s +[info] JmhBench.bench_iS_mdtc_stm_1 thrpt 25 1559980.428 ± 58314.308 ops/s +[info] JmhBench.bench_iS_mdtc_vec_0 thrpt 25 2075815.450 ± 25831.246 ops/s +[info] JmhBench.bench_iS_mdtc_vec_1 thrpt 25 1774053.986 ± 24836.414 ops/s +[info] JmhBench.bench_iS_mdtc_wra_0 thrpt 25 2082465.430 ± 54266.074 ops/s +[info] JmhBench.bench_iS_mdtc_wra_1 thrpt 25 1852111.865 ± 40534.660 ops/s +[info] JmhBench.bench_iS_nbr_arb_0 thrpt 25 15093643.409 ± 312819.522 ops/s +[info] JmhBench.bench_iS_nbr_arb_1 thrpt 25 16536.134 ± 44.776 ops/s +[info] JmhBench.bench_iS_nbr_arr_0 thrpt 25 13681116.218 ± 2069446.439 ops/s +[info] 
JmhBench.bench_iS_nbr_arr_1 thrpt 25 16216.197 ± 153.748 ops/s +[info] JmhBench.bench_iS_nbr_ars_0 thrpt 25 15231820.552 ± 71198.860 ops/s +[info] JmhBench.bench_iS_nbr_ars_1 thrpt 25 16431.260 ± 58.286 ops/s +[info] JmhBench.bench_iS_nbr_ast_0 thrpt 25 16422832.425 ± 160552.096 ops/s +[info] JmhBench.bench_iS_nbr_ast_1 thrpt 25 17926.164 ± 56.371 ops/s +[info] JmhBench.bench_iS_nbr_ils_0 thrpt 25 13584445.254 ± 117216.650 ops/s +[info] JmhBench.bench_iS_nbr_ils_1 thrpt 25 16068.090 ± 2653.041 ops/s +[info] JmhBench.bench_iS_nbr_ish_0 thrpt 25 13215826.280 ± 1505586.375 ops/s +[info] JmhBench.bench_iS_nbr_ish_1 thrpt 25 6105.864 ± 59.147 ops/s +[info] JmhBench.bench_iS_nbr_lhs_0 thrpt 25 17670261.677 ± 161144.754 ops/s +[info] JmhBench.bench_iS_nbr_lhs_1 thrpt 25 20389.690 ± 2324.186 ops/s +[info] JmhBench.bench_iS_nbr_lst_0 thrpt 25 16357641.101 ± 60754.533 ops/s +[info] JmhBench.bench_iS_nbr_lst_1 thrpt 25 18433.714 ± 2057.837 ops/s +[info] JmhBench.bench_iS_nbr_mhs_0 thrpt 25 11812511.313 ± 88367.679 ops/s +[info] JmhBench.bench_iS_nbr_mhs_1 thrpt 25 5600.543 ± 17.450 ops/s +[info] JmhBench.bench_iS_nbr_muq_0 thrpt 25 11691763.921 ± 46245.840 ops/s +[info] JmhBench.bench_iS_nbr_muq_1 thrpt 25 18182.019 ± 4847.548 ops/s +[info] JmhBench.bench_iS_nbr_prq_0 thrpt 25 15349594.898 ± 76157.252 ops/s +[info] JmhBench.bench_iS_nbr_prq_1 thrpt 25 15811.057 ± 4892.669 ops/s +[info] JmhBench.bench_iS_nbr_que_0 thrpt 25 15718603.188 ± 111532.970 ops/s +[info] JmhBench.bench_iS_nbr_que_1 thrpt 25 19776.658 ± 2178.055 ops/s +[info] JmhBench.bench_iS_nbr_stm_0 thrpt 25 4132893.909 ± 194559.967 ops/s +[info] JmhBench.bench_iS_nbr_stm_1 thrpt 25 4726.500 ± 338.295 ops/s +[info] JmhBench.bench_iS_nbr_trs_0 thrpt 25 10698968.841 ± 70991.716 ops/s +[info] JmhBench.bench_iS_nbr_trs_1 thrpt 25 9598.236 ± 785.458 ops/s +[info] JmhBench.bench_iS_nbr_vec_0 thrpt 25 14664874.142 ± 77772.684 ops/s +[info] JmhBench.bench_iS_nbr_vec_1 thrpt 25 17564.678 ± 3868.762 ops/s +[info] 
JmhBench.bench_iS_nbr_wra_0 thrpt 25 15708895.657 ± 50767.513 ops/s +[info] JmhBench.bench_iS_nbr_wra_1 thrpt 25 16808.537 ± 132.263 ops/s +[info] JmhBench.bench_spI_pfmc_arb_0 thrpt 25 122026.458 ± 3338.778 ops/s +[info] JmhBench.bench_spI_pfmc_arb_1 thrpt 25 31955.438 ± 3265.388 ops/s +[info] JmhBench.bench_spI_pfmc_arr_0 thrpt 25 123793.942 ± 1465.776 ops/s +[info] JmhBench.bench_spI_pfmc_arr_1 thrpt 25 55926.755 ± 582.057 ops/s +[info] JmhBench.bench_spI_pfmc_ars_0 thrpt 25 123838.933 ± 2528.316 ops/s +[info] JmhBench.bench_spI_pfmc_ars_1 thrpt 25 31911.979 ± 1354.094 ops/s +[info] JmhBench.bench_spI_pfmc_ish_0 thrpt 25 101313.969 ± 1541.856 ops/s +[info] JmhBench.bench_spI_pfmc_ish_1 thrpt 25 17564.133 ± 179.150 ops/s +[info] JmhBench.bench_spI_pfmc_jix_0 thrpt 25 124602.760 ± 1339.185 ops/s +[info] JmhBench.bench_spI_pfmc_jix_1 thrpt 25 46913.983 ± 1399.403 ops/s +[info] JmhBench.bench_spI_pfmc_jln_0 thrpt 25 122034.673 ± 2914.257 ops/s +[info] JmhBench.bench_spI_pfmc_jln_1 thrpt 25 21508.984 ± 2283.637 ops/s +[info] JmhBench.bench_spI_pfmc_mhs_0 thrpt 25 149803.545 ± 3416.269 ops/s +[info] JmhBench.bench_spI_pfmc_mhs_1 thrpt 25 18125.518 ± 1343.974 ops/s +[info] JmhBench.bench_spI_pfmc_vec_0 thrpt 25 121742.883 ± 1495.545 ops/s +[info] JmhBench.bench_spI_pfmc_vec_1 thrpt 25 30318.891 ± 1097.725 ops/s +[info] JmhBench.bench_spI_pfmc_wra_0 thrpt 25 124890.583 ± 706.846 ops/s +[info] JmhBench.bench_spI_pfmc_wra_1 thrpt 25 26081.649 ± 187.747 ops/s +[info] JmhBench.bench_spI_psum_arb_0 thrpt 25 120576.423 ± 1468.003 ops/s +[info] JmhBench.bench_spI_psum_arb_1 thrpt 25 33624.042 ± 476.147 ops/s +[info] JmhBench.bench_spI_psum_arr_0 thrpt 25 119288.186 ± 1423.925 ops/s +[info] JmhBench.bench_spI_psum_arr_1 thrpt 25 67775.549 ± 3304.787 ops/s +[info] JmhBench.bench_spI_psum_ars_0 thrpt 25 119588.690 ± 1729.515 ops/s +[info] JmhBench.bench_spI_psum_ars_1 thrpt 25 36403.156 ± 302.723 ops/s +[info] JmhBench.bench_spI_psum_ish_0 thrpt 25 102179.787 ± 1467.909 ops/s 
+[info] JmhBench.bench_spI_psum_ish_1 thrpt 25 18745.407 ± 97.495 ops/s +[info] JmhBench.bench_spI_psum_jix_0 thrpt 25 121718.375 ± 1209.540 ops/s +[info] JmhBench.bench_spI_psum_jix_1 thrpt 25 57101.512 ± 9299.544 ops/s +[info] JmhBench.bench_spI_psum_jln_0 thrpt 25 118907.263 ± 1689.327 ops/s +[info] JmhBench.bench_spI_psum_jln_1 thrpt 25 24048.469 ± 2075.903 ops/s +[info] JmhBench.bench_spI_psum_mhs_0 thrpt 25 146367.207 ± 2070.261 ops/s +[info] JmhBench.bench_spI_psum_mhs_1 thrpt 25 17278.757 ± 65.427 ops/s +[info] JmhBench.bench_spI_psum_vec_0 thrpt 25 118634.736 ± 2482.411 ops/s +[info] JmhBench.bench_spI_psum_vec_1 thrpt 25 35514.411 ± 316.920 ops/s +[info] JmhBench.bench_spI_psum_wra_0 thrpt 25 121026.432 ± 976.185 ops/s +[info] JmhBench.bench_spI_psum_wra_1 thrpt 25 26725.301 ± 163.464 ops/s +[info] JmhBench.bench_spI_ptrig_arb_0 thrpt 25 89470.600 ± 767.024 ops/s +[info] JmhBench.bench_spI_ptrig_arb_1 thrpt 25 794.995 ± 3.720 ops/s +[info] JmhBench.bench_spI_ptrig_arr_0 thrpt 25 89645.535 ± 571.222 ops/s +[info] JmhBench.bench_spI_ptrig_arr_1 thrpt 25 804.330 ± 5.509 ops/s +[info] JmhBench.bench_spI_ptrig_ars_0 thrpt 25 89499.574 ± 825.471 ops/s +[info] JmhBench.bench_spI_ptrig_ars_1 thrpt 25 782.195 ± 20.628 ops/s +[info] JmhBench.bench_spI_ptrig_ish_0 thrpt 25 77761.399 ± 661.445 ops/s +[info] JmhBench.bench_spI_ptrig_ish_1 thrpt 25 749.151 ± 5.523 ops/s +[info] JmhBench.bench_spI_ptrig_jix_0 thrpt 25 88337.024 ± 1120.008 ops/s +[info] JmhBench.bench_spI_ptrig_jix_1 thrpt 25 801.746 ± 7.369 ops/s +[info] JmhBench.bench_spI_ptrig_jln_0 thrpt 25 87959.641 ± 364.156 ops/s +[info] JmhBench.bench_spI_ptrig_jln_1 thrpt 25 739.279 ± 4.313 ops/s +[info] JmhBench.bench_spI_ptrig_mhs_0 thrpt 25 84923.215 ± 359.009 ops/s +[info] JmhBench.bench_spI_ptrig_mhs_1 thrpt 25 776.755 ± 4.260 ops/s +[info] JmhBench.bench_spI_ptrig_vec_0 thrpt 25 87511.031 ± 774.512 ops/s +[info] JmhBench.bench_spI_ptrig_vec_1 thrpt 25 788.650 ± 9.614 ops/s +[info] 
JmhBench.bench_spI_ptrig_wra_0 thrpt 25 89686.421 ± 971.655 ops/s +[info] JmhBench.bench_spI_ptrig_wra_1 thrpt 25 787.302 ± 5.752 ops/s +[info] JmhBench.bench_spS_pfmc_arb_0 thrpt 25 120005.523 ± 3795.221 ops/s +[info] JmhBench.bench_spS_pfmc_arb_1 thrpt 25 39776.834 ± 2984.305 ops/s +[info] JmhBench.bench_spS_pfmc_arr_0 thrpt 25 122280.558 ± 1336.325 ops/s +[info] JmhBench.bench_spS_pfmc_arr_1 thrpt 25 45979.842 ± 477.262 ops/s +[info] JmhBench.bench_spS_pfmc_ars_0 thrpt 25 123052.552 ± 1867.415 ops/s +[info] JmhBench.bench_spS_pfmc_ars_1 thrpt 25 40875.035 ± 2405.416 ops/s +[info] JmhBench.bench_spS_pfmc_ish_0 thrpt 25 103990.927 ± 1116.911 ops/s +[info] JmhBench.bench_spS_pfmc_ish_1 thrpt 25 16612.000 ± 1061.350 ops/s +[info] JmhBench.bench_spS_pfmc_jix_0 thrpt 25 118688.056 ± 3835.042 ops/s +[info] JmhBench.bench_spS_pfmc_jix_1 thrpt 25 48450.737 ± 209.135 ops/s +[info] JmhBench.bench_spS_pfmc_jln_0 thrpt 25 115844.257 ± 3520.553 ops/s +[info] JmhBench.bench_spS_pfmc_jln_1 thrpt 25 16845.440 ± 1730.417 ops/s +[info] JmhBench.bench_spS_pfmc_mhs_0 thrpt 25 131205.292 ± 1626.360 ops/s +[info] JmhBench.bench_spS_pfmc_mhs_1 thrpt 25 18525.922 ± 1488.423 ops/s +[info] JmhBench.bench_spS_pfmc_vec_0 thrpt 25 120071.251 ± 1925.718 ops/s +[info] JmhBench.bench_spS_pfmc_vec_1 thrpt 25 43352.191 ± 472.814 ops/s +[info] JmhBench.bench_spS_pfmc_wra_0 thrpt 25 124290.660 ± 2022.406 ops/s +[info] JmhBench.bench_spS_pfmc_wra_1 thrpt 25 41701.119 ± 2265.434 ops/s +[info] JmhBench.bench_spS_phtrg_arb_0 thrpt 25 81797.538 ± 548.798 ops/s +[info] JmhBench.bench_spS_phtrg_arb_1 thrpt 25 570.817 ± 5.242 ops/s +[info] JmhBench.bench_spS_phtrg_arr_0 thrpt 25 82795.075 ± 685.318 ops/s +[info] JmhBench.bench_spS_phtrg_arr_1 thrpt 25 571.434 ± 4.941 ops/s +[info] JmhBench.bench_spS_phtrg_ars_0 thrpt 25 82077.986 ± 824.614 ops/s +[info] JmhBench.bench_spS_phtrg_ars_1 thrpt 25 573.351 ± 3.988 ops/s +[info] JmhBench.bench_spS_phtrg_ish_0 thrpt 25 75813.040 ± 434.354 ops/s +[info] 
JmhBench.bench_spS_phtrg_ish_1 thrpt 25 532.699 ± 2.842 ops/s +[info] JmhBench.bench_spS_phtrg_jix_0 thrpt 25 82789.954 ± 556.422 ops/s +[info] JmhBench.bench_spS_phtrg_jix_1 thrpt 25 566.457 ± 11.434 ops/s +[info] JmhBench.bench_spS_phtrg_jln_0 thrpt 25 82330.442 ± 848.242 ops/s +[info] JmhBench.bench_spS_phtrg_jln_1 thrpt 25 529.041 ± 2.995 ops/s +[info] JmhBench.bench_spS_phtrg_mhs_0 thrpt 25 77381.806 ± 381.747 ops/s +[info] JmhBench.bench_spS_phtrg_mhs_1 thrpt 25 555.069 ± 2.452 ops/s +[info] JmhBench.bench_spS_phtrg_vec_0 thrpt 25 82664.902 ± 862.091 ops/s +[info] JmhBench.bench_spS_phtrg_vec_1 thrpt 25 573.708 ± 3.414 ops/s +[info] JmhBench.bench_spS_phtrg_wra_0 thrpt 25 81739.577 ± 929.248 ops/s +[info] JmhBench.bench_spS_phtrg_wra_1 thrpt 25 572.521 ± 4.090 ops/s +[info] JmhBench.bench_spS_pnbr_arb_0 thrpt 25 125276.095 ± 1514.002 ops/s +[info] JmhBench.bench_spS_pnbr_arb_1 thrpt 25 29841.668 ± 254.424 ops/s +[info] JmhBench.bench_spS_pnbr_arr_0 thrpt 25 123884.469 ± 2242.992 ops/s +[info] JmhBench.bench_spS_pnbr_arr_1 thrpt 25 34171.150 ± 571.312 ops/s +[info] JmhBench.bench_spS_pnbr_ars_0 thrpt 25 124496.684 ± 2599.330 ops/s +[info] JmhBench.bench_spS_pnbr_ars_1 thrpt 25 34250.875 ± 2183.277 ops/s +[info] JmhBench.bench_spS_pnbr_ish_0 thrpt 25 104652.916 ± 1160.961 ops/s +[info] JmhBench.bench_spS_pnbr_ish_1 thrpt 25 17229.432 ± 109.380 ops/s +[info] JmhBench.bench_spS_pnbr_jix_0 thrpt 25 123150.422 ± 1242.209 ops/s +[info] JmhBench.bench_spS_pnbr_jix_1 thrpt 25 38114.715 ± 4984.153 ops/s +[info] JmhBench.bench_spS_pnbr_jln_0 thrpt 25 120365.993 ± 1728.280 ops/s +[info] JmhBench.bench_spS_pnbr_jln_1 thrpt 25 14622.187 ± 1880.274 ops/s +[info] JmhBench.bench_spS_pnbr_mhs_0 thrpt 25 135428.927 ± 4568.452 ops/s +[info] JmhBench.bench_spS_pnbr_mhs_1 thrpt 25 15520.900 ± 88.414 ops/s +[info] JmhBench.bench_spS_pnbr_vec_0 thrpt 25 125389.867 ± 1739.809 ops/s +[info] JmhBench.bench_spS_pnbr_vec_1 thrpt 25 32482.390 ± 250.674 ops/s +[info] 
JmhBench.bench_spS_pnbr_wra_0 thrpt 25 124679.925 ± 2175.996 ops/s +[info] JmhBench.bench_spS_pnbr_wra_1 thrpt 25 34420.077 ± 2410.935 ops/s +[info] JmhBench.bench_ssI_fmc_arb_0 thrpt 25 9250153.823 ± 888445.531 ops/s +[info] JmhBench.bench_ssI_fmc_arb_1 thrpt 25 9355.086 ± 742.574 ops/s +[info] JmhBench.bench_ssI_fmc_arr_0 thrpt 25 16052416.844 ± 961240.618 ops/s +[info] JmhBench.bench_ssI_fmc_arr_1 thrpt 25 76936.655 ± 1407.831 ops/s +[info] JmhBench.bench_ssI_fmc_ars_0 thrpt 25 8782587.762 ± 742059.840 ops/s +[info] JmhBench.bench_ssI_fmc_ars_1 thrpt 25 9050.227 ± 780.554 ops/s +[info] JmhBench.bench_ssI_fmc_ast_0 thrpt 25 8141416.177 ± 637829.950 ops/s +[info] JmhBench.bench_ssI_fmc_ast_1 thrpt 25 10100.534 ± 503.490 ops/s +[info] JmhBench.bench_ssI_fmc_ils_0 thrpt 25 6515951.736 ± 398883.162 ops/s +[info] JmhBench.bench_ssI_fmc_ils_1 thrpt 25 9154.419 ± 328.846 ops/s +[info] JmhBench.bench_ssI_fmc_ish_0 thrpt 25 7957383.510 ± 807469.463 ops/s +[info] JmhBench.bench_ssI_fmc_ish_1 thrpt 25 8534.034 ± 76.508 ops/s +[info] JmhBench.bench_ssI_fmc_jix_0 thrpt 25 10325320.390 ± 737830.832 ops/s +[info] JmhBench.bench_ssI_fmc_jix_1 thrpt 25 23670.508 ± 1300.536 ops/s +[info] JmhBench.bench_ssI_fmc_jln_0 thrpt 25 10147871.532 ± 321143.743 ops/s +[info] JmhBench.bench_ssI_fmc_jln_1 thrpt 25 25868.179 ± 890.519 ops/s +[info] JmhBench.bench_ssI_fmc_lhs_0 thrpt 25 8167530.225 ± 68842.632 ops/s +[info] JmhBench.bench_ssI_fmc_lhs_1 thrpt 25 10305.165 ± 649.741 ops/s +[info] JmhBench.bench_ssI_fmc_lst_0 thrpt 25 7450089.752 ± 860591.681 ops/s +[info] JmhBench.bench_ssI_fmc_lst_1 thrpt 25 5580.838 ± 89.269 ops/s +[info] JmhBench.bench_ssI_fmc_mhs_0 thrpt 25 7298703.199 ± 478617.272 ops/s +[info] JmhBench.bench_ssI_fmc_mhs_1 thrpt 25 10089.122 ± 207.164 ops/s +[info] JmhBench.bench_ssI_fmc_muq_0 thrpt 25 3306530.823 ± 240247.734 ops/s +[info] JmhBench.bench_ssI_fmc_muq_1 thrpt 25 4707.455 ± 112.368 ops/s +[info] JmhBench.bench_ssI_fmc_prq_0 thrpt 25 8466869.742 ± 656493.257 
ops/s +[info] JmhBench.bench_ssI_fmc_prq_1 thrpt 25 8092.785 ± 356.931 ops/s +[info] JmhBench.bench_ssI_fmc_que_0 thrpt 25 7342169.186 ± 237418.673 ops/s +[info] JmhBench.bench_ssI_fmc_que_1 thrpt 25 10399.761 ± 166.829 ops/s +[info] JmhBench.bench_ssI_fmc_stm_0 thrpt 25 7367724.075 ± 1129148.866 ops/s +[info] JmhBench.bench_ssI_fmc_stm_1 thrpt 25 8236.488 ± 2628.423 ops/s +[info] JmhBench.bench_ssI_fmc_trs_0 thrpt 25 7780121.129 ± 108678.562 ops/s +[info] JmhBench.bench_ssI_fmc_trs_1 thrpt 25 7679.958 ± 200.984 ops/s +[info] JmhBench.bench_ssI_fmc_vec_0 thrpt 25 9118653.340 ± 247215.668 ops/s +[info] JmhBench.bench_ssI_fmc_vec_1 thrpt 25 9589.100 ± 1791.009 ops/s +[info] JmhBench.bench_ssI_fmc_wra_0 thrpt 25 9262801.298 ± 456880.630 ops/s +[info] JmhBench.bench_ssI_fmc_wra_1 thrpt 25 8448.089 ± 751.665 ops/s +[info] JmhBench.bench_ssI_mdtc_arb_0 thrpt 25 6947123.970 ± 215862.500 ops/s +[info] JmhBench.bench_ssI_mdtc_arb_1 thrpt 25 4216205.596 ± 81236.212 ops/s +[info] JmhBench.bench_ssI_mdtc_arr_0 thrpt 25 7877041.387 ± 175458.225 ops/s +[info] JmhBench.bench_ssI_mdtc_arr_1 thrpt 25 4754725.775 ± 49021.630 ops/s +[info] JmhBench.bench_ssI_mdtc_ars_0 thrpt 25 6745190.439 ± 476861.322 ops/s +[info] JmhBench.bench_ssI_mdtc_ars_1 thrpt 25 4262939.806 ± 47183.691 ops/s +[info] JmhBench.bench_ssI_mdtc_ast_0 thrpt 25 6854949.067 ± 68537.150 ops/s +[info] JmhBench.bench_ssI_mdtc_ast_1 thrpt 25 4025499.567 ± 104981.719 ops/s +[info] JmhBench.bench_ssI_mdtc_jix_0 thrpt 25 5227027.825 ± 94030.854 ops/s +[info] JmhBench.bench_ssI_mdtc_jix_1 thrpt 25 3156089.920 ± 140112.857 ops/s +[info] JmhBench.bench_ssI_mdtc_jln_0 thrpt 25 5380258.918 ± 245567.821 ops/s +[info] JmhBench.bench_ssI_mdtc_jln_1 thrpt 25 3130643.665 ± 184628.801 ops/s +[info] JmhBench.bench_ssI_mdtc_lst_0 thrpt 25 6075277.338 ± 117938.332 ops/s +[info] JmhBench.bench_ssI_mdtc_lst_1 thrpt 25 4140145.442 ± 438069.320 ops/s +[info] JmhBench.bench_ssI_mdtc_muq_0 thrpt 25 2785261.264 ± 65612.901 ops/s +[info] 
JmhBench.bench_ssI_mdtc_muq_1 thrpt 25 1364771.167 ± 23748.839 ops/s +[info] JmhBench.bench_ssI_mdtc_que_0 thrpt 25 5122120.176 ± 104187.267 ops/s +[info] JmhBench.bench_ssI_mdtc_que_1 thrpt 25 2820520.670 ± 33621.491 ops/s +[info] JmhBench.bench_ssI_mdtc_stm_0 thrpt 25 6030630.852 ± 162783.134 ops/s +[info] JmhBench.bench_ssI_mdtc_stm_1 thrpt 25 4232143.930 ± 34806.929 ops/s +[info] JmhBench.bench_ssI_mdtc_vec_0 thrpt 25 6841092.491 ± 167568.603 ops/s +[info] JmhBench.bench_ssI_mdtc_vec_1 thrpt 25 4182368.069 ± 164073.205 ops/s +[info] JmhBench.bench_ssI_mdtc_wra_0 thrpt 25 6514842.858 ± 82706.838 ops/s +[info] JmhBench.bench_ssI_mdtc_wra_1 thrpt 25 3742865.389 ± 25362.161 ops/s +[info] JmhBench.bench_ssI_sum_arb_0 thrpt 25 9715905.252 ± 226945.687 ops/s +[info] JmhBench.bench_ssI_sum_arb_1 thrpt 25 11672.133 ± 1735.140 ops/s +[info] JmhBench.bench_ssI_sum_arr_0 thrpt 25 24618518.546 ± 257756.997 ops/s +[info] JmhBench.bench_ssI_sum_arr_1 thrpt 25 55474.149 ± 161.288 ops/s +[info] JmhBench.bench_ssI_sum_ars_0 thrpt 25 9485950.081 ± 462558.656 ops/s +[info] JmhBench.bench_ssI_sum_ars_1 thrpt 25 11299.644 ± 1711.369 ops/s +[info] JmhBench.bench_ssI_sum_ast_0 thrpt 25 8429742.725 ± 392860.041 ops/s +[info] JmhBench.bench_ssI_sum_ast_1 thrpt 25 11110.412 ± 1268.393 ops/s +[info] JmhBench.bench_ssI_sum_ils_0 thrpt 25 15909722.568 ± 349441.040 ops/s +[info] JmhBench.bench_ssI_sum_ils_1 thrpt 25 9678.768 ± 235.065 ops/s +[info] JmhBench.bench_ssI_sum_ish_0 thrpt 25 13619341.652 ± 147547.281 ops/s +[info] JmhBench.bench_ssI_sum_ish_1 thrpt 25 6212.693 ± 56.017 ops/s +[info] JmhBench.bench_ssI_sum_jix_0 thrpt 25 26310816.947 ± 153269.402 ops/s +[info] JmhBench.bench_ssI_sum_jix_1 thrpt 25 109104.485 ± 12951.334 ops/s +[info] JmhBench.bench_ssI_sum_jln_0 thrpt 25 24230659.032 ± 130893.341 ops/s +[info] JmhBench.bench_ssI_sum_jln_1 thrpt 25 44752.456 ± 3333.934 ops/s +[info] JmhBench.bench_ssI_sum_lhs_0 thrpt 25 9055689.390 ± 262812.071 ops/s +[info] 
JmhBench.bench_ssI_sum_lhs_1 thrpt 25 11230.558 ± 1611.547 ops/s +[info] JmhBench.bench_ssI_sum_lst_0 thrpt 25 13729453.044 ± 327014.089 ops/s +[info] JmhBench.bench_ssI_sum_lst_1 thrpt 25 17518.869 ± 183.762 ops/s +[info] JmhBench.bench_ssI_sum_mhs_0 thrpt 25 10573254.637 ± 79889.822 ops/s +[info] JmhBench.bench_ssI_sum_mhs_1 thrpt 25 4901.874 ± 36.980 ops/s +[info] JmhBench.bench_ssI_sum_muq_0 thrpt 25 3809588.908 ± 286246.301 ops/s +[info] JmhBench.bench_ssI_sum_muq_1 thrpt 25 5193.727 ± 65.227 ops/s +[info] JmhBench.bench_ssI_sum_prq_0 thrpt 25 8280044.390 ± 169984.161 ops/s +[info] JmhBench.bench_ssI_sum_prq_1 thrpt 25 10344.875 ± 908.924 ops/s +[info] JmhBench.bench_ssI_sum_que_0 thrpt 25 9207756.772 ± 1668707.276 ops/s +[info] JmhBench.bench_ssI_sum_que_1 thrpt 25 11101.518 ± 132.127 ops/s +[info] JmhBench.bench_ssI_sum_stm_0 thrpt 25 13160208.480 ± 222285.592 ops/s +[info] JmhBench.bench_ssI_sum_stm_1 thrpt 25 9703.721 ± 199.536 ops/s +[info] JmhBench.bench_ssI_sum_trs_0 thrpt 25 11550300.294 ± 168610.279 ops/s +[info] JmhBench.bench_ssI_sum_trs_1 thrpt 25 8148.181 ± 221.631 ops/s +[info] JmhBench.bench_ssI_sum_vec_0 thrpt 25 8807366.131 ± 181526.563 ops/s +[info] JmhBench.bench_ssI_sum_vec_1 thrpt 25 11708.860 ± 1701.693 ops/s +[info] JmhBench.bench_ssI_sum_wra_0 thrpt 25 8933774.049 ± 32608.515 ops/s +[info] JmhBench.bench_ssI_sum_wra_1 thrpt 25 10768.478 ± 1900.586 ops/s +[info] JmhBench.bench_ssI_trig_arb_0 thrpt 25 127701.752 ± 300.487 ops/s +[info] JmhBench.bench_ssI_trig_arb_1 thrpt 25 127.874 ± 0.244 ops/s +[info] JmhBench.bench_ssI_trig_arr_0 thrpt 25 128529.229 ± 147.708 ops/s +[info] JmhBench.bench_ssI_trig_arr_1 thrpt 25 128.141 ± 0.184 ops/s +[info] JmhBench.bench_ssI_trig_ars_0 thrpt 25 127889.038 ± 160.223 ops/s +[info] JmhBench.bench_ssI_trig_ars_1 thrpt 25 127.690 ± 0.348 ops/s +[info] JmhBench.bench_ssI_trig_ast_0 thrpt 25 127744.419 ± 223.829 ops/s +[info] JmhBench.bench_ssI_trig_ast_1 thrpt 25 127.805 ± 0.212 ops/s +[info] 
JmhBench.bench_ssI_trig_ils_0 thrpt 25 127427.062 ± 250.370 ops/s +[info] JmhBench.bench_ssI_trig_ils_1 thrpt 25 127.624 ± 0.198 ops/s +[info] JmhBench.bench_ssI_trig_ish_0 thrpt 25 126980.180 ± 201.098 ops/s +[info] JmhBench.bench_ssI_trig_ish_1 thrpt 25 125.386 ± 0.839 ops/s +[info] JmhBench.bench_ssI_trig_jix_0 thrpt 25 128047.658 ± 232.857 ops/s +[info] JmhBench.bench_ssI_trig_jix_1 thrpt 25 127.976 ± 0.179 ops/s +[info] JmhBench.bench_ssI_trig_jln_0 thrpt 25 128373.708 ± 156.722 ops/s +[info] JmhBench.bench_ssI_trig_jln_1 thrpt 25 127.963 ± 0.215 ops/s +[info] JmhBench.bench_ssI_trig_lhs_0 thrpt 25 127860.927 ± 268.862 ops/s +[info] JmhBench.bench_ssI_trig_lhs_1 thrpt 25 127.581 ± 0.288 ops/s +[info] JmhBench.bench_ssI_trig_lst_0 thrpt 25 127175.254 ± 376.425 ops/s +[info] JmhBench.bench_ssI_trig_lst_1 thrpt 25 127.725 ± 0.237 ops/s +[info] JmhBench.bench_ssI_trig_mhs_0 thrpt 25 127309.073 ± 796.229 ops/s +[info] JmhBench.bench_ssI_trig_mhs_1 thrpt 25 126.488 ± 0.355 ops/s +[info] JmhBench.bench_ssI_trig_muq_0 thrpt 25 125063.545 ± 299.759 ops/s +[info] JmhBench.bench_ssI_trig_muq_1 thrpt 25 124.286 ± 0.881 ops/s +[info] JmhBench.bench_ssI_trig_prq_0 thrpt 25 127111.698 ± 354.458 ops/s +[info] JmhBench.bench_ssI_trig_prq_1 thrpt 25 127.102 ± 0.424 ops/s +[info] JmhBench.bench_ssI_trig_que_0 thrpt 25 127059.599 ± 786.206 ops/s +[info] JmhBench.bench_ssI_trig_que_1 thrpt 25 126.141 ± 1.355 ops/s +[info] JmhBench.bench_ssI_trig_stm_0 thrpt 25 127112.678 ± 241.836 ops/s +[info] JmhBench.bench_ssI_trig_stm_1 thrpt 25 127.085 ± 0.441 ops/s +[info] JmhBench.bench_ssI_trig_trs_0 thrpt 25 127168.343 ± 253.254 ops/s +[info] JmhBench.bench_ssI_trig_trs_1 thrpt 25 125.868 ± 0.414 ops/s +[info] JmhBench.bench_ssI_trig_vec_0 thrpt 25 127900.540 ± 166.834 ops/s +[info] JmhBench.bench_ssI_trig_vec_1 thrpt 25 127.803 ± 0.190 ops/s +[info] JmhBench.bench_ssI_trig_wra_0 thrpt 25 128020.166 ± 213.353 ops/s +[info] JmhBench.bench_ssI_trig_wra_1 thrpt 25 127.817 ± 0.152 ops/s 
+[info] JmhBench.bench_ssS_fmc_arb_0 thrpt 25 7463503.099 ± 761023.850 ops/s +[info] JmhBench.bench_ssS_fmc_arb_1 thrpt 25 22735.029 ± 216.868 ops/s +[info] JmhBench.bench_ssS_fmc_arr_0 thrpt 25 7692217.391 ± 662399.601 ops/s +[info] JmhBench.bench_ssS_fmc_arr_1 thrpt 25 23523.761 ± 3082.429 ops/s +[info] JmhBench.bench_ssS_fmc_ars_0 thrpt 25 7359939.104 ± 733550.142 ops/s +[info] JmhBench.bench_ssS_fmc_ars_1 thrpt 25 18184.014 ± 3207.106 ops/s +[info] JmhBench.bench_ssS_fmc_ast_0 thrpt 25 6777545.559 ± 711554.629 ops/s +[info] JmhBench.bench_ssS_fmc_ast_1 thrpt 25 17990.091 ± 2754.649 ops/s +[info] JmhBench.bench_ssS_fmc_ils_0 thrpt 25 5536641.969 ± 173682.607 ops/s +[info] JmhBench.bench_ssS_fmc_ils_1 thrpt 25 16297.925 ± 1948.984 ops/s +[info] JmhBench.bench_ssS_fmc_ish_0 thrpt 25 5276557.648 ± 151673.651 ops/s +[info] JmhBench.bench_ssS_fmc_ish_1 thrpt 25 5487.043 ± 26.346 ops/s +[info] JmhBench.bench_ssS_fmc_jix_0 thrpt 25 8622948.878 ± 545772.382 ops/s +[info] JmhBench.bench_ssS_fmc_jix_1 thrpt 25 25400.994 ± 2093.185 ops/s +[info] JmhBench.bench_ssS_fmc_jln_0 thrpt 25 8110492.748 ± 334080.983 ops/s +[info] JmhBench.bench_ssS_fmc_jln_1 thrpt 25 26365.259 ± 2299.475 ops/s +[info] JmhBench.bench_ssS_fmc_lhs_0 thrpt 25 6786461.725 ± 739728.565 ops/s +[info] JmhBench.bench_ssS_fmc_lhs_1 thrpt 25 20871.320 ± 2145.218 ops/s +[info] JmhBench.bench_ssS_fmc_lst_0 thrpt 25 5694810.309 ± 980249.336 ops/s +[info] JmhBench.bench_ssS_fmc_lst_1 thrpt 25 16006.665 ± 115.192 ops/s +[info] JmhBench.bench_ssS_fmc_mhs_0 thrpt 25 4918958.662 ± 1011742.955 ops/s +[info] JmhBench.bench_ssS_fmc_mhs_1 thrpt 25 6243.466 ± 16.259 ops/s +[info] JmhBench.bench_ssS_fmc_muq_0 thrpt 25 3143846.099 ± 161160.702 ops/s +[info] JmhBench.bench_ssS_fmc_muq_1 thrpt 25 5153.514 ± 88.295 ops/s +[info] JmhBench.bench_ssS_fmc_prq_0 thrpt 25 6494916.233 ± 659140.032 ops/s +[info] JmhBench.bench_ssS_fmc_prq_1 thrpt 25 13046.181 ± 129.301 ops/s +[info] JmhBench.bench_ssS_fmc_que_0 thrpt 25 5233751.510 ± 
600136.627 ops/s +[info] JmhBench.bench_ssS_fmc_que_1 thrpt 25 11701.632 ± 255.248 ops/s +[info] JmhBench.bench_ssS_fmc_stm_0 thrpt 25 5954886.043 ± 721275.435 ops/s +[info] JmhBench.bench_ssS_fmc_stm_1 thrpt 25 12476.953 ± 657.314 ops/s +[info] JmhBench.bench_ssS_fmc_trs_0 thrpt 25 5512418.318 ± 431220.677 ops/s +[info] JmhBench.bench_ssS_fmc_trs_1 thrpt 25 7876.367 ± 49.840 ops/s +[info] JmhBench.bench_ssS_fmc_vec_0 thrpt 25 6647652.124 ± 808794.052 ops/s +[info] JmhBench.bench_ssS_fmc_vec_1 thrpt 25 18797.521 ± 4612.928 ops/s +[info] JmhBench.bench_ssS_fmc_wra_0 thrpt 25 7312179.994 ± 727113.651 ops/s +[info] JmhBench.bench_ssS_fmc_wra_1 thrpt 25 19996.576 ± 3747.471 ops/s +[info] JmhBench.bench_ssS_htrg_arb_0 thrpt 25 99446.779 ± 280.765 ops/s +[info] JmhBench.bench_ssS_htrg_arb_1 thrpt 25 98.641 ± 0.151 ops/s +[info] JmhBench.bench_ssS_htrg_arr_0 thrpt 25 99590.833 ± 277.204 ops/s +[info] JmhBench.bench_ssS_htrg_arr_1 thrpt 25 98.728 ± 0.115 ops/s +[info] JmhBench.bench_ssS_htrg_ars_0 thrpt 25 99488.987 ± 325.960 ops/s +[info] JmhBench.bench_ssS_htrg_ars_1 thrpt 25 98.556 ± 0.157 ops/s +[info] JmhBench.bench_ssS_htrg_ast_0 thrpt 25 99404.624 ± 239.178 ops/s +[info] JmhBench.bench_ssS_htrg_ast_1 thrpt 25 98.474 ± 0.236 ops/s +[info] JmhBench.bench_ssS_htrg_ils_0 thrpt 25 99089.876 ± 928.141 ops/s +[info] JmhBench.bench_ssS_htrg_ils_1 thrpt 25 98.270 ± 0.161 ops/s +[info] JmhBench.bench_ssS_htrg_ish_0 thrpt 25 99700.720 ± 151.216 ops/s +[info] JmhBench.bench_ssS_htrg_ish_1 thrpt 25 96.097 ± 0.123 ops/s +[info] JmhBench.bench_ssS_htrg_jix_0 thrpt 25 100076.554 ± 253.018 ops/s +[info] JmhBench.bench_ssS_htrg_jix_1 thrpt 25 98.762 ± 0.134 ops/s +[info] JmhBench.bench_ssS_htrg_jln_0 thrpt 25 100312.125 ± 185.924 ops/s +[info] JmhBench.bench_ssS_htrg_jln_1 thrpt 25 98.734 ± 0.126 ops/s +[info] JmhBench.bench_ssS_htrg_lhs_0 thrpt 25 99754.119 ± 217.214 ops/s +[info] JmhBench.bench_ssS_htrg_lhs_1 thrpt 25 98.602 ± 0.120 ops/s +[info] JmhBench.bench_ssS_htrg_lst_0 thrpt 
25 99829.970 ± 238.825 ops/s +[info] JmhBench.bench_ssS_htrg_lst_1 thrpt 25 98.721 ± 0.082 ops/s +[info] JmhBench.bench_ssS_htrg_mhs_0 thrpt 25 99476.088 ± 201.311 ops/s +[info] JmhBench.bench_ssS_htrg_mhs_1 thrpt 25 97.000 ± 0.127 ops/s +[info] JmhBench.bench_ssS_htrg_muq_0 thrpt 25 97745.699 ± 289.134 ops/s +[info] JmhBench.bench_ssS_htrg_muq_1 thrpt 25 96.814 ± 0.216 ops/s +[info] JmhBench.bench_ssS_htrg_prq_0 thrpt 25 99471.700 ± 420.144 ops/s +[info] JmhBench.bench_ssS_htrg_prq_1 thrpt 25 97.984 ± 0.144 ops/s +[info] JmhBench.bench_ssS_htrg_que_0 thrpt 25 99167.531 ± 519.919 ops/s +[info] JmhBench.bench_ssS_htrg_que_1 thrpt 25 97.867 ± 0.734 ops/s +[info] JmhBench.bench_ssS_htrg_stm_0 thrpt 25 99286.997 ± 493.065 ops/s +[info] JmhBench.bench_ssS_htrg_stm_1 thrpt 25 98.307 ± 0.123 ops/s +[info] JmhBench.bench_ssS_htrg_trs_0 thrpt 25 99632.863 ± 167.388 ops/s +[info] JmhBench.bench_ssS_htrg_trs_1 thrpt 25 97.235 ± 0.186 ops/s +[info] JmhBench.bench_ssS_htrg_vec_0 thrpt 25 99346.287 ± 342.790 ops/s +[info] JmhBench.bench_ssS_htrg_vec_1 thrpt 25 98.639 ± 0.134 ops/s +[info] JmhBench.bench_ssS_htrg_wra_0 thrpt 25 99393.808 ± 435.384 ops/s +[info] JmhBench.bench_ssS_htrg_wra_1 thrpt 25 98.650 ± 0.113 ops/s +[info] JmhBench.bench_ssS_mdtc_arb_0 thrpt 25 1984988.001 ± 60815.590 ops/s +[info] JmhBench.bench_ssS_mdtc_arb_1 thrpt 25 1804763.676 ± 24880.414 ops/s +[info] JmhBench.bench_ssS_mdtc_arr_0 thrpt 25 2039192.164 ± 20642.902 ops/s +[info] JmhBench.bench_ssS_mdtc_arr_1 thrpt 25 1869149.671 ± 57778.551 ops/s +[info] JmhBench.bench_ssS_mdtc_ars_0 thrpt 25 2152810.620 ± 257467.311 ops/s +[info] JmhBench.bench_ssS_mdtc_ars_1 thrpt 25 1943524.599 ± 249992.603 ops/s +[info] JmhBench.bench_ssS_mdtc_ast_0 thrpt 25 2098672.703 ± 224737.345 ops/s +[info] JmhBench.bench_ssS_mdtc_ast_1 thrpt 25 1756317.464 ± 32268.509 ops/s +[info] JmhBench.bench_ssS_mdtc_jix_0 thrpt 25 1886876.043 ± 40790.835 ops/s +[info] JmhBench.bench_ssS_mdtc_jix_1 thrpt 25 1776842.938 ± 87607.424 ops/s 
+[info] JmhBench.bench_ssS_mdtc_jln_0 thrpt 25 1924389.622 ± 100113.823 ops/s +[info] JmhBench.bench_ssS_mdtc_jln_1 thrpt 25 1739231.139 ± 27705.929 ops/s +[info] JmhBench.bench_ssS_mdtc_lst_0 thrpt 25 2088812.599 ± 282022.358 ops/s +[info] JmhBench.bench_ssS_mdtc_lst_1 thrpt 25 1885221.781 ± 98783.686 ops/s +[info] JmhBench.bench_ssS_mdtc_muq_0 thrpt 25 1535772.126 ± 139156.482 ops/s +[info] JmhBench.bench_ssS_mdtc_muq_1 thrpt 25 1348730.978 ± 123859.731 ops/s +[info] JmhBench.bench_ssS_mdtc_que_0 thrpt 25 1808813.567 ± 38380.283 ops/s +[info] JmhBench.bench_ssS_mdtc_que_1 thrpt 25 1859406.391 ± 243374.664 ops/s +[info] JmhBench.bench_ssS_mdtc_stm_0 thrpt 25 2159647.943 ± 261644.254 ops/s +[info] JmhBench.bench_ssS_mdtc_stm_1 thrpt 25 2060597.468 ± 239102.768 ops/s +[info] JmhBench.bench_ssS_mdtc_vec_0 thrpt 25 1960074.223 ± 19074.784 ops/s +[info] JmhBench.bench_ssS_mdtc_vec_1 thrpt 25 1838536.051 ± 83700.683 ops/s +[info] JmhBench.bench_ssS_mdtc_wra_0 thrpt 25 1974542.200 ± 19477.153 ops/s +[info] JmhBench.bench_ssS_mdtc_wra_1 thrpt 25 1846644.168 ± 61236.016 ops/s +[info] JmhBench.bench_ssS_nbr_arb_0 thrpt 25 15162942.609 ± 190397.163 ops/s +[info] JmhBench.bench_ssS_nbr_arb_1 thrpt 25 34172.246 ± 11486.810 ops/s +[info] JmhBench.bench_ssS_nbr_arr_0 thrpt 25 15507888.940 ± 238185.693 ops/s +[info] JmhBench.bench_ssS_nbr_arr_1 thrpt 25 35941.101 ± 10040.604 ops/s +[info] JmhBench.bench_ssS_nbr_ars_0 thrpt 25 15197879.786 ± 98346.084 ops/s +[info] JmhBench.bench_ssS_nbr_ars_1 thrpt 25 38301.207 ± 9362.340 ops/s +[info] JmhBench.bench_ssS_nbr_ast_0 thrpt 25 13432007.539 ± 405165.436 ops/s +[info] JmhBench.bench_ssS_nbr_ast_1 thrpt 25 32959.315 ± 8359.559 ops/s +[info] JmhBench.bench_ssS_nbr_ils_0 thrpt 25 8549875.457 ± 500035.754 ops/s +[info] JmhBench.bench_ssS_nbr_ils_1 thrpt 25 21605.083 ± 8611.146 ops/s +[info] JmhBench.bench_ssS_nbr_ish_0 thrpt 25 7664446.924 ± 98245.672 ops/s +[info] JmhBench.bench_ssS_nbr_ish_1 thrpt 25 5854.499 ± 28.620 ops/s +[info] 
JmhBench.bench_ssS_nbr_jix_0 thrpt 25 16089447.761 ± 68029.793 ops/s +[info] JmhBench.bench_ssS_nbr_jix_1 thrpt 25 40651.466 ± 4089.126 ops/s +[info] JmhBench.bench_ssS_nbr_jln_0 thrpt 25 14995175.785 ± 244736.253 ops/s +[info] JmhBench.bench_ssS_nbr_jln_1 thrpt 25 37382.867 ± 1452.421 ops/s +[info] JmhBench.bench_ssS_nbr_lhs_0 thrpt 25 13926273.218 ± 110410.977 ops/s +[info] JmhBench.bench_ssS_nbr_lhs_1 thrpt 25 27298.620 ± 8327.998 ops/s +[info] JmhBench.bench_ssS_nbr_lst_0 thrpt 25 10221445.868 ± 204571.661 ops/s +[info] JmhBench.bench_ssS_nbr_lst_1 thrpt 25 22706.524 ± 127.693 ops/s +[info] JmhBench.bench_ssS_nbr_mhs_0 thrpt 25 9484224.952 ± 83230.676 ops/s +[info] JmhBench.bench_ssS_nbr_mhs_1 thrpt 25 4272.497 ± 16.585 ops/s +[info] JmhBench.bench_ssS_nbr_muq_0 thrpt 25 3306298.286 ± 32629.694 ops/s +[info] JmhBench.bench_ssS_nbr_muq_1 thrpt 25 5674.616 ± 95.096 ops/s +[info] JmhBench.bench_ssS_nbr_prq_0 thrpt 25 13602518.766 ± 182996.531 ops/s +[info] JmhBench.bench_ssS_nbr_prq_1 thrpt 25 8229.425 ± 1387.909 ops/s +[info] JmhBench.bench_ssS_nbr_que_0 thrpt 25 7926786.770 ± 71210.458 ops/s +[info] JmhBench.bench_ssS_nbr_que_1 thrpt 25 12578.198 ± 340.733 ops/s +[info] JmhBench.bench_ssS_nbr_stm_0 thrpt 25 10451796.812 ± 162198.366 ops/s +[info] JmhBench.bench_ssS_nbr_stm_1 thrpt 25 10900.482 ± 876.892 ops/s +[info] JmhBench.bench_ssS_nbr_trs_0 thrpt 25 8243471.097 ± 135268.322 ops/s +[info] JmhBench.bench_ssS_nbr_trs_1 thrpt 25 5423.466 ± 46.213 ops/s +[info] JmhBench.bench_ssS_nbr_vec_0 thrpt 25 11273633.534 ± 133713.186 ops/s +[info] JmhBench.bench_ssS_nbr_vec_1 thrpt 25 22784.871 ± 3851.382 ops/s +[info] JmhBench.bench_ssS_nbr_wra_0 thrpt 25 15100257.953 ± 74244.381 ops/s +[info] JmhBench.bench_ssS_nbr_wra_1 thrpt 25 33805.074 ± 11062.118 ops/s +[info] JmhBench.bench_tpI_sum_arb_0 thrpt 25 26739900.465 ± 335390.195 ops/s +[info] JmhBench.bench_tpI_sum_arb_1 thrpt 25 36127.483 ± 192.396 ops/s +[info] JmhBench.bench_tpI_sum_arr_0 thrpt 25 96980149.604 ± 
307301.990 ops/s +[info] JmhBench.bench_tpI_sum_arr_1 thrpt 25 433545.154 ± 505.319 ops/s +[info] JmhBench.bench_tpI_sum_ars_0 thrpt 25 62659319.636 ± 263473.537 ops/s +[info] JmhBench.bench_tpI_sum_ars_1 thrpt 25 134066.476 ± 2016.903 ops/s +[info] JmhBench.bench_tpI_sum_ish_0 thrpt 25 13553901.120 ± 539231.487 ops/s +[info] JmhBench.bench_tpI_sum_ish_1 thrpt 25 7114.146 ± 88.443 ops/s +[info] JmhBench.bench_tpI_sum_mhs_0 thrpt 25 34967900.851 ± 170504.872 ops/s +[info] JmhBench.bench_tpI_sum_mhs_1 thrpt 25 28972.583 ± 4297.660 ops/s +[info] JmhBench.bench_tpI_sum_vec_0 thrpt 25 40722024.527 ± 324420.816 ops/s +[info] JmhBench.bench_tpI_sum_vec_1 thrpt 25 56600.790 ± 453.750 ops/s +[info] JmhBench.bench_tpI_sum_wra_0 thrpt 25 21076023.537 ± 321478.351 ops/s +[info] JmhBench.bench_tpI_sum_wra_1 thrpt 25 21466.089 ± 1622.643 ops/s +[info] JmhBench.bench_tpI_trig_arb_0 thrpt 25 129620.815 ± 139.024 ops/s +[info] JmhBench.bench_tpI_trig_arb_1 thrpt 25 128.952 ± 0.106 ops/s +[info] JmhBench.bench_tpI_trig_arr_0 thrpt 25 129958.638 ± 148.953 ops/s +[info] JmhBench.bench_tpI_trig_arr_1 thrpt 25 129.385 ± 0.112 ops/s +[info] JmhBench.bench_tpI_trig_ars_0 thrpt 25 129745.070 ± 228.564 ops/s +[info] JmhBench.bench_tpI_trig_ars_1 thrpt 25 128.757 ± 0.199 ops/s +[info] JmhBench.bench_tpI_trig_ish_0 thrpt 25 128598.096 ± 207.794 ops/s +[info] JmhBench.bench_tpI_trig_ish_1 thrpt 25 125.427 ± 0.108 ops/s +[info] JmhBench.bench_tpI_trig_mhs_0 thrpt 25 129109.741 ± 173.754 ops/s +[info] JmhBench.bench_tpI_trig_mhs_1 thrpt 25 127.048 ± 0.209 ops/s +[info] JmhBench.bench_tpI_trig_vec_0 thrpt 25 129567.839 ± 205.096 ops/s +[info] JmhBench.bench_tpI_trig_vec_1 thrpt 25 128.955 ± 0.139 ops/s +[info] JmhBench.bench_tpI_trig_wra_0 thrpt 25 129438.595 ± 145.718 ops/s +[info] JmhBench.bench_tpI_trig_wra_1 thrpt 25 128.359 ± 0.198 ops/s +[info] JmhBench.bench_tpS_htrg_arb_0 thrpt 25 100158.018 ± 205.248 ops/s +[info] JmhBench.bench_tpS_htrg_arb_1 thrpt 25 98.649 ± 0.169 ops/s +[info] 
JmhBench.bench_tpS_htrg_arr_0 thrpt 25 100144.281 ± 158.463 ops/s +[info] JmhBench.bench_tpS_htrg_arr_1 thrpt 25 98.765 ± 0.260 ops/s +[info] JmhBench.bench_tpS_htrg_ars_0 thrpt 25 100096.628 ± 172.960 ops/s +[info] JmhBench.bench_tpS_htrg_ars_1 thrpt 25 98.710 ± 0.179 ops/s +[info] JmhBench.bench_tpS_htrg_ish_0 thrpt 25 99760.501 ± 217.237 ops/s +[info] JmhBench.bench_tpS_htrg_ish_1 thrpt 25 97.228 ± 0.519 ops/s +[info] JmhBench.bench_tpS_htrg_mhs_0 thrpt 25 99806.585 ± 153.342 ops/s +[info] JmhBench.bench_tpS_htrg_mhs_1 thrpt 25 97.412 ± 0.354 ops/s +[info] JmhBench.bench_tpS_htrg_vec_0 thrpt 25 99612.078 ± 530.034 ops/s +[info] JmhBench.bench_tpS_htrg_vec_1 thrpt 25 98.638 ± 0.206 ops/s +[info] JmhBench.bench_tpS_htrg_wra_0 thrpt 25 100084.080 ± 194.389 ops/s +[info] JmhBench.bench_tpS_htrg_wra_1 thrpt 25 98.759 ± 0.217 ops/s +[info] JmhBench.bench_tpS_nbr_arb_0 thrpt 25 27773552.600 ± 107286.891 ops/s +[info] JmhBench.bench_tpS_nbr_arb_1 thrpt 25 26047.916 ± 541.631 ops/s +[info] JmhBench.bench_tpS_nbr_arr_0 thrpt 25 32696587.907 ± 125809.446 ops/s +[info] JmhBench.bench_tpS_nbr_arr_1 thrpt 25 31707.576 ± 1469.663 ops/s +[info] JmhBench.bench_tpS_nbr_ars_0 thrpt 25 27763273.573 ± 75994.385 ops/s +[info] JmhBench.bench_tpS_nbr_ars_1 thrpt 25 25907.969 ± 499.582 ops/s +[info] JmhBench.bench_tpS_nbr_ish_0 thrpt 25 10561733.949 ± 333991.228 ops/s +[info] JmhBench.bench_tpS_nbr_ish_1 thrpt 25 6686.211 ± 38.894 ops/s +[info] JmhBench.bench_tpS_nbr_mhs_0 thrpt 25 20592408.675 ± 290612.904 ops/s +[info] JmhBench.bench_tpS_nbr_mhs_1 thrpt 25 6586.088 ± 57.671 ops/s +[info] JmhBench.bench_tpS_nbr_vec_0 thrpt 25 19918742.462 ± 129361.736 ops/s +[info] JmhBench.bench_tpS_nbr_vec_1 thrpt 25 22393.856 ± 660.971 ops/s +[info] JmhBench.bench_tpS_nbr_wra_0 thrpt 25 27798715.944 ± 399855.662 ops/s +[info] JmhBench.bench_tpS_nbr_wra_1 thrpt 25 26368.583 ± 698.515 ops/s +[info] JmhBench.bench_tsI_sum_ast_0 thrpt 25 29292472.271 ± 378141.088 ops/s +[info] 
JmhBench.bench_tsI_sum_ast_1 thrpt 25 38732.103 ± 250.264 ops/s +[info] JmhBench.bench_tsI_sum_ils_0 thrpt 25 12612812.138 ± 2285614.348 ops/s +[info] JmhBench.bench_tsI_sum_ils_1 thrpt 25 20618.454 ± 7399.667 ops/s +[info] JmhBench.bench_tsI_sum_lhs_0 thrpt 25 19047956.030 ± 102806.511 ops/s +[info] JmhBench.bench_tsI_sum_lhs_1 thrpt 25 21098.108 ± 1892.775 ops/s +[info] JmhBench.bench_tsI_sum_lst_0 thrpt 25 13250785.213 ± 284214.672 ops/s +[info] JmhBench.bench_tsI_sum_lst_1 thrpt 25 20620.823 ± 7198.647 ops/s +[info] JmhBench.bench_tsI_sum_muq_0 thrpt 25 6186108.761 ± 49405.604 ops/s +[info] JmhBench.bench_tsI_sum_muq_1 thrpt 25 6953.879 ± 68.504 ops/s +[info] JmhBench.bench_tsI_sum_prq_0 thrpt 25 15439239.952 ± 61151.281 ops/s +[info] JmhBench.bench_tsI_sum_prq_1 thrpt 25 15760.730 ± 44.485 ops/s +[info] JmhBench.bench_tsI_sum_que_0 thrpt 25 6544132.929 ± 90771.895 ops/s +[info] JmhBench.bench_tsI_sum_que_1 thrpt 25 7583.197 ± 82.784 ops/s +[info] JmhBench.bench_tsI_sum_stm_0 thrpt 25 12838212.904 ± 40317.104 ops/s +[info] JmhBench.bench_tsI_sum_stm_1 thrpt 25 13428.876 ± 412.327 ops/s +[info] JmhBench.bench_tsI_sum_trs_0 thrpt 25 11018879.369 ± 36863.225 ops/s +[info] JmhBench.bench_tsI_sum_trs_1 thrpt 25 12874.559 ± 320.397 ops/s +[info] JmhBench.bench_tsI_trig_ast_0 thrpt 25 129218.582 ± 247.185 ops/s +[info] JmhBench.bench_tsI_trig_ast_1 thrpt 25 128.730 ± 0.134 ops/s +[info] JmhBench.bench_tsI_trig_ils_0 thrpt 25 128764.813 ± 270.973 ops/s +[info] JmhBench.bench_tsI_trig_ils_1 thrpt 25 128.383 ± 0.229 ops/s +[info] JmhBench.bench_tsI_trig_lhs_0 thrpt 25 129310.531 ± 402.325 ops/s +[info] JmhBench.bench_tsI_trig_lhs_1 thrpt 25 128.862 ± 0.132 ops/s +[info] JmhBench.bench_tsI_trig_lst_0 thrpt 25 128629.859 ± 169.266 ops/s +[info] JmhBench.bench_tsI_trig_lst_1 thrpt 25 128.817 ± 0.136 ops/s +[info] JmhBench.bench_tsI_trig_muq_0 thrpt 25 126882.540 ± 261.902 ops/s +[info] JmhBench.bench_tsI_trig_muq_1 thrpt 25 124.074 ± 0.275 ops/s +[info] 
JmhBench.bench_tsI_trig_prq_0 thrpt 25 128704.594 ± 132.903 ops/s +[info] JmhBench.bench_tsI_trig_prq_1 thrpt 25 127.264 ± 0.215 ops/s +[info] JmhBench.bench_tsI_trig_que_0 thrpt 25 127027.541 ± 305.857 ops/s +[info] JmhBench.bench_tsI_trig_que_1 thrpt 25 127.115 ± 0.387 ops/s +[info] JmhBench.bench_tsI_trig_stm_0 thrpt 25 128763.321 ± 170.721 ops/s +[info] JmhBench.bench_tsI_trig_stm_1 thrpt 25 127.858 ± 0.341 ops/s +[info] JmhBench.bench_tsI_trig_trs_0 thrpt 25 128251.208 ± 143.219 ops/s +[info] JmhBench.bench_tsI_trig_trs_1 thrpt 25 125.752 ± 0.714 ops/s +[info] JmhBench.bench_tsS_htrg_ast_0 thrpt 25 99993.610 ± 190.884 ops/s +[info] JmhBench.bench_tsS_htrg_ast_1 thrpt 25 98.665 ± 0.229 ops/s +[info] JmhBench.bench_tsS_htrg_ils_0 thrpt 25 99769.301 ± 181.360 ops/s +[info] JmhBench.bench_tsS_htrg_ils_1 thrpt 25 97.749 ± 0.981 ops/s +[info] JmhBench.bench_tsS_htrg_lhs_0 thrpt 25 99843.502 ± 435.773 ops/s +[info] JmhBench.bench_tsS_htrg_lhs_1 thrpt 25 98.554 ± 0.370 ops/s +[info] JmhBench.bench_tsS_htrg_lst_0 thrpt 25 99994.704 ± 271.382 ops/s +[info] JmhBench.bench_tsS_htrg_lst_1 thrpt 25 98.679 ± 0.216 ops/s +[info] JmhBench.bench_tsS_htrg_muq_0 thrpt 25 97832.075 ± 224.268 ops/s +[info] JmhBench.bench_tsS_htrg_muq_1 thrpt 25 96.275 ± 0.219 ops/s +[info] JmhBench.bench_tsS_htrg_prq_0 thrpt 25 100041.101 ± 215.115 ops/s +[info] JmhBench.bench_tsS_htrg_prq_1 thrpt 25 98.142 ± 0.362 ops/s +[info] JmhBench.bench_tsS_htrg_que_0 thrpt 25 99899.045 ± 199.887 ops/s +[info] JmhBench.bench_tsS_htrg_que_1 thrpt 25 98.160 ± 0.193 ops/s +[info] JmhBench.bench_tsS_htrg_stm_0 thrpt 25 99938.866 ± 583.593 ops/s +[info] JmhBench.bench_tsS_htrg_stm_1 thrpt 25 98.389 ± 0.258 ops/s +[info] JmhBench.bench_tsS_htrg_trs_0 thrpt 25 99843.112 ± 278.174 ops/s +[info] JmhBench.bench_tsS_htrg_trs_1 thrpt 25 97.433 ± 0.404 ops/s +[info] JmhBench.bench_tsS_nbr_ast_0 thrpt 25 24754126.558 ± 2252827.519 ops/s +[info] JmhBench.bench_tsS_nbr_ast_1 thrpt 25 25470.616 ± 2313.648 ops/s +[info] 
JmhBench.bench_tsS_nbr_ils_0 thrpt 25 18815243.091 ± 229054.186 ops/s +[info] JmhBench.bench_tsS_nbr_ils_1 thrpt 25 18405.753 ± 1106.520 ops/s +[info] JmhBench.bench_tsS_nbr_lhs_0 thrpt 25 23611132.342 ± 214340.942 ops/s +[info] JmhBench.bench_tsS_nbr_lhs_1 thrpt 25 19916.336 ± 750.264 ops/s +[info] JmhBench.bench_tsS_nbr_lst_0 thrpt 25 16162287.601 ± 386166.719 ops/s +[info] JmhBench.bench_tsS_nbr_lst_1 thrpt 25 15050.506 ± 225.637 ops/s +[info] JmhBench.bench_tsS_nbr_muq_0 thrpt 25 3990516.577 ± 72332.103 ops/s +[info] JmhBench.bench_tsS_nbr_muq_1 thrpt 25 3957.072 ± 47.475 ops/s +[info] JmhBench.bench_tsS_nbr_prq_0 thrpt 25 24071415.301 ± 190014.780 ops/s +[info] JmhBench.bench_tsS_nbr_prq_1 thrpt 25 12006.143 ± 668.510 ops/s +[info] JmhBench.bench_tsS_nbr_que_0 thrpt 25 11035865.936 ± 125073.361 ops/s +[info] JmhBench.bench_tsS_nbr_que_1 thrpt 25 10895.451 ± 82.204 ops/s +[info] JmhBench.bench_tsS_nbr_stm_0 thrpt 25 15774294.393 ± 161310.226 ops/s +[info] JmhBench.bench_tsS_nbr_stm_1 thrpt 25 12751.885 ± 218.859 ops/s +[info] JmhBench.bench_tsS_nbr_trs_0 thrpt 25 12164073.361 ± 417198.078 ops/s +[info] JmhBench.bench_tsS_nbr_trs_1 thrpt 25 9425.149 ± 212.728 ops/s +[success] Total time: 42760 s, completed Jan 6, 2016 11:28:41 AM +> diff --git a/benchmark/src/main/scala/bench/CodeGen.scala b/benchmark/src/main/scala/bench/CodeGen.scala new file mode 100644 index 0000000..53ede95 --- /dev/null +++ b/benchmark/src/main/scala/bench/CodeGen.scala @@ -0,0 +1,254 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package bench.codegen + +import scala.util._ +import control.NonFatal + +object Generator { + // Trailing * means that a collection is not expected to work efficiently in parallel + // Trailing ! 
means that a collection is not expected to maintain original order + val annotated = "arr ish! lst* ils*! que* stm* trs*! vec arb ars ast* mhs! lhs*! prq*! muq* wra jix jln".split(' ') + + // Parallel version if any appears after / + // Trailing % means that operation creates new collections and thus can't be used with Stepper (sequential ops only) + // Trailing ! means that collection must maintain original order (i.e. don't use if collection is marked !) + val allops = Seq(("OnInt", "I", "sum/psum trig/ptrig fmc%/pfmc mdtc!%"), ("OnString", "S", "nbr/pnbr htrg/phtrg fmc%/pfmc mdtc!%")) + + def parsefs(fs: String) = fs.split(' ').map(_.split('/') match { + case Array(x) => (x.takeWhile(_.isLetter), None, x contains "!", x contains "%") + case Array(x,y) => (x.takeWhile(_.isLetter), Some(y.takeWhile(_.isLetter)), (x+y) contains "!", x contains "%") + }) + + val names = annotated.map(_.takeWhile(_.isLetter)) + val nojname = names.filterNot(_ startsWith "j").toSet + val parname = annotated.filterNot(_ contains "*").map(_.takeWhile(_.isLetter)).toSet + val sqnname = names.filterNot(parname).toSet union names.filterNot(nojname).toSet + val ordname = annotated.filterNot(_ contains "!").map(_.takeWhile(_.isLetter)).toSet + val jmhsizes = Array(10, 10000) // JMH takes FOREVER, so we're lucky to get two sizes. 
+ + def writeTo(f: java.io.File)(pr: (String => Unit) => Unit): Either[Throwable, Unit] = { + try { + val pw = new java.io.PrintWriter(f) + val wr: String => Unit = s => pw.println(s) + try { pr(wr); Right(()) } + catch { case NonFatal(t) => Left(t) } + finally { pw.close() } + } + catch { case NonFatal(t) => Left(t) } + } + + def sayArrayI(oa: Option[Array[Int]]) = oa match { case Some(a) => a.mkString("Array(", ",", ")"); case _ => "" } + + def agreement(target: java.io.File, sizes: Option[Array[Int]] = None): Unit = { + val q = "\"" + if (target.exists) throw new java.io.IOException("Generator will not write to existing file: " + target.getPath) + writeTo(target){ pr => + pr( """package bench.test""") + pr( """""") + pr( """import bench.generate._, bench.operate._, bench.generate.EnableIterators._""") + pr( """import scala.compat.java8.StreamConverters._""") + pr( """""") + pr( """object Agreement {""") + pr( """ def run() {""") + pr( """ val wrong = new collection.mutable.ArrayBuffer[String]""") + pr( """ def check[A](a1: A, a2: => A, msg: String) {""") + pr( """ var t = System.nanoTime""") + pr( """ if (!CloseEnough(a1, { val ans = a2; t = System.nanoTime - t; ans}))""") + pr( """ wrong += msg""") + pr( """ if (t > 2000000000) wrong += "Slow " + msg""") + pr( """ }""") + pr( s" val m = (new bench.generate.Things(${sayArrayI(sizes)})).N;" ) + allops.foreach{ case (o, t, fs) => + names.foreach{ n => + pr( s" { // Scope for operations $o collection $n") + pr( s" var x = new bench.generate.Things(${sayArrayI(sizes)})" ) + parsefs(fs).foreach{ case (f, pf, ord, nu) => + if (ordname(n) || !ord) { + pr( """ for (i <- 0 until m) {""") + pr( s" val z = $o.$f(x.arr.c$t(i))") + if (nojname(n)) { + pr( s" check(z, $o.$f(x.$n.c$t(i)), ${q}c$t $f $n ${q}+i.toString)") + pr( s" check(z, $o.$f(x.$n.i$t(i)), ${q}i$t $f $n ${q}+i.toString)") + if (pf.isDefined) + pr( s" check(z, $o.${pf.get}(x.$n.c$t(i).par), ${q}i$t ${pf.get} $n ${q}+i.toString)") + } + if (sqnname(n) || 
parname(n)) { + pr( s" check(z, $o.$f(x.$n.ss$t(i)), ${q}ss$t $f $n ${q}+i.toString)") + if (nojname(n) && !nu) { + if (sqnname(n)) + pr( s" check(z, $o.$f(x.$n.ts$t(i)), ${q}ts$t $f $n ${q}+i.toString)") + else + pr( s" check(z, $o.$f(x.$n.tp$t(i)), ${q}tp$t $f $n ${q}+i.toString)") + } + } + if (parname(n) && pf.isDefined) { + pr( s" check(z, $o.${pf.get}(x.$n.sp$t(i)), ${q}sp$t ${pf.get} $n ${q}+i.toString)") + if (nojname(n) && !nu) + pr( s" check(z, $o.${pf.get}(x.$n.zp$t(i)), ${q}zp$t ${pf.get} $n ${q}+i.toString)") + } + pr( s" }") + } + } + pr( s" x = null // Allow GC" ) + pr( s" } // End scope for operations $o collection $n") + } + } + pr( """ wrong.foreach(println)""") + pr( """ if (wrong.nonEmpty) sys.exit(1) """) + pr( """ }""") + pr( """}""") + } match { + case Left(t) => println("Did not successfully write file: " + target.getPath); throw t + case _ => + } + } + + def quickBenchWithThyme(target: java.io.File, sizes: Option[Array[Int]] = None): Unit = { + val q = "\"" + if (target.exists) throw new java.io.IOException("Generator will not write to existing file: " + target.getPath) + writeTo(target){ pr => + pr( """package bench.test""") + pr( """""") + pr( """import bench.generate._, bench.operate._, bench.generate.EnableIterators._""") + pr( """import scala.compat.java8.StreamConverters._""") + pr( """import ichi.bench.Thyme""") + pr( """""") + pr( """object ThymeBench {""") + pr( """ def run() {""") + pr( """ val th = Thyme.warmed()""") + pr( s" val m = (new bench.generate.Things(${sayArrayI(sizes)})).N;" ) + pr( """ def timings[A](x: bench.generate.Things, op: Int => A, name: String) {""") + pr( """ val ts = new collection.mutable.ArrayBuffer[(Double, Double, Double)]""") + pr( """ val discard = th.clock(op(m-1))(_ => ()) // Init collections""") + pr( """ for (i <- 0 until m) {""") + pr( """ println(name + i)""") + pr( """ val b = Thyme.Benched.empty""") + pr( """ val a = th.bench(op(i))(b)""") + pr( """ if (a == null) ts += ((Double.NaN, 
Double.NaN, Double.NaN))""") + pr( """ else ts += ((""") + pr( """ b.runtime * 1e6, b.runtimeCI95._1 * 1e6, b.runtimeCI95._2 * 1e6""") + pr( """ ))""") + pr( """ }""") + pr( """ val sb = new StringBuilder""") + pr( """ sb ++= name + ":" """) + pr( """ if (sb.length < 16) sb ++= " " * (16 - sb.length)""") + pr( """ ts.foreach{ case (c, lo, hi) =>""") + pr( """ sb ++= " " """) + pr( """ sb ++= " %11.4f".format(c)""") + pr( """ sb ++= " %11.4f".format(lo)""") + pr( """ sb ++= " %11.4f".format(hi)""") + pr( """ }""") + pr( """ println(sb.result)""") + pr( """ }""") + allops.foreach{ case (o, t, fs) => + names.foreach{ n => + pr( s" { // Scope for operations $o collection $n") + pr( s" var x = new bench.generate.Things(${sayArrayI(sizes)})" ) + parsefs(fs).foreach{ case (f, pf, ord, nu) => + if (ordname(n) || !ord) { + if (nojname(n)) { + pr( s" timings(x, i => $o.$f(x.$n.c$t(i)), ${q}c$t $f $n${q})"); + pr( s" timings(x, i => $o.$f(x.$n.i$t(i)), ${q}i$t $f $n${q})") + if (!nu) { + if (sqnname(n)) + pr( s" timings(x, i => $o.$f(x.$n.ts$t(i)), ${q}ts$t $f $n${q})"); + else + pr( s" timings(x, i => $o.$f(x.$n.tp$t(i)), ${q}tp$t $f $n${q})"); + } + } + if (sqnname(n)) { + pr( s" timings(x, i => $o.$f(x.$n.ss$t(i)), ${q}ss$t $f $n${q})") + if (nojname(n)) + pr( s" timings(x, i => $o.$f(x.$n.zs$t(i)), ${q}zs$t $f $n${q})") + } + if (parname(n) && pf.isDefined) { + pr( s" timings(x, i => $o.$f(x.$n.sp$t(i)), ${q}sp$t $f $n${q})") + if (nojname(n)) + pr( s" timings(x, i => $o.$f(x.$n.zp$t(i)), ${q}zp$t $f $n${q})") + } + } + } + pr( s" x = null // Allow GC" ) + pr( s" } // End scope for operations $o collection $n") + } + } + pr( """ }""") + pr( """}""") + } match { + case Left(t) => println("Did not successfully write file: " + target.getPath); throw t + case _ => + } + } + + def jmhBench(target: java.io.File = new java.io.File("JmhBench.scala"), sizes: Option[Array[Int]] = Some(jmhsizes)): Unit = { + val q = "\"" + if (target.exists) throw new java.io.IOException("Generator 
will not write to existing file: " + target.getPath) + writeTo(target){ pr => + pr( """// This file auto-generated by bench.codegen.Generator.jmhBench. Do not modify directly.""") + pr( """""") + pr( """package bench.test""") + pr( """""") + pr( """import bench.generate._, bench.operate._, bench.generate.EnableIterators._""") + pr( """import scala.compat.java8.StreamConverters._""") + pr( """import org.openjdk.jmh.annotations._""") + pr( """""") + pr( """@State(Scope.Benchmark)""") + pr( """class JmhBench {""") + pr( s" val x = new bench.generate.Things(${sayArrayI(sizes)})") + val m = sizes.map(_.length).getOrElse(new bench.generate.Things().N) + allops.foreach{ case (o, t, fs) => + names.foreach{ n => + parsefs(fs).foreach{ case (f, pf, ord, nu) => + for (i <- 0 until m) { + if (ordname(n) || !ord) { + if (nojname(n)) { + pr( s" @Benchmark def bench_c${t}_${f}_${n}_$i() = $o.$f(x.$n.c$t($i))") + pr( s" @Benchmark def bench_i${t}_${f}_${n}_$i() = $o.$f(x.$n.i$t($i))") + if (!nu) { + if (sqnname(n)) + pr( s" @Benchmark def bench_ts${t}_${f}_${n}_$i() = $o.$f(x.$n.ts$t($i))") + else + pr( s" @Benchmark def bench_tp${t}_${f}_${n}_$i() = $o.$f(x.$n.tp$t($i))") + } + } + pr( s" @Benchmark def bench_ss${t}_${f}_${n}_$i() = $o.$f(x.$n.ss$t($i))") + //if (nojname(n)) + // pr( s" @Benchmark def bench_zs${t}_${f}_${n}_$i() = $o.$f(x.$n.zs$t($i))") + if (parname(n) && pf.isDefined) { + if (nojname(n)) + pr( s" @Benchmark def bench_cp${t}_${pf.get}_${n}_$i() = $o.${pf.get}(x.$n.c$t($i).par)") + pr( s" @Benchmark def bench_sp${t}_${pf.get}_${n}_$i() = $o.${pf.get}(x.$n.sp$t($i))") + //if (nojname(n)) + // pr( s" @Benchmark def bench_zp${t}_${f}_${n}_$i() = $o.$f(x.$n.zp$t($i))") + } + } + } + } + } + } + pr( """}""") + } match { + case Left(t) => println("Did not successfully write file: " + target.getPath); throw t + case _ => + } + } +} + +object GenJmhBench { + def main(args: Array[String]): Unit = { + val f = new java.io.File("JmhBench.scala") + f.delete() + 
Generator.jmhBench(f) + } +} diff --git a/benchmark/src/main/scala/bench/CollectionSource.scala b/benchmark/src/main/scala/bench/CollectionSource.scala new file mode 100644 index 0000000..07d021d --- /dev/null +++ b/benchmark/src/main/scala/bench/CollectionSource.scala @@ -0,0 +1,298 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package bench + +import java.util.stream._ + +import scala.collection.generic.CanBuildFrom +import scala.compat.java8.StreamConverters._ +import scala.compat.java8.collectionImpl._ +import scala.compat.java8.converterImpl._ +import scala.compat.java8.{MakesSequentialStream, MakesParallelStream} + +package object generate { + private def myInty(n: Int) = 0 until n + private def myStringy(n: Int) = myInty(n).map(i => (i*i).toString) + + object Coll { + def i[CC[_]](n: Int)(implicit cbf: CanBuildFrom[Nothing, Int, CC[Int]]): CC[Int] = { + val b = cbf(); + myInty(n).foreach(b += _) + b.result() + } + def s[CC[_]](n: Int)(implicit cbf: CanBuildFrom[Nothing, String, CC[String]]): CC[String] = { + val b = cbf(); + myStringy(n).foreach(b += _) + b.result() + } + } + + object Pstep { + def i[CC](cc: CC)(implicit steppize: CC => MakesStepper[Int, EfficientSubstep]): IntStepper = + steppize(cc).stepper + def s[CC](cc: CC)(implicit steppize: CC => MakesStepper[String, EfficientSubstep]): AnyStepper[String] = + steppize(cc).stepper + } + + object Sstep { + def i[CC](cc: CC)(implicit steppize: CC => MakesStepper[Int, Any]): IntStepper = + steppize(cc).stepper + def s[CC](cc: CC)(implicit steppize: CC => MakesStepper[String, Any]): AnyStepper[String] = + steppize(cc).stepper + } + + object PsStream { + def i[CC](cc: CC)(implicit steppize: CC => MakesStepper[Int, EfficientSubstep]): IntStream = + 
steppize(cc).stepper.parStream + def s[CC](cc: CC)(implicit steppize: CC => MakesStepper[String, EfficientSubstep]): Stream[String] = + steppize(cc).stepper.parStream + } + + object SsStream { + def i[CC](cc: CC)(implicit steppize: CC => MakesStepper[Int, Any]): IntStream = + steppize(cc).stepper.seqStream + def s[CC](cc: CC)(implicit steppize: CC => MakesStepper[String, Any]): Stream[String] = + steppize(cc).stepper.seqStream + } + + object Sstream { + def i[CC](cc: CC)(implicit streamize: CC => MakesSequentialStream[Int, IntStream]) = + streamize(cc).seqStream + def s[CC](cc: CC)(implicit streamize: CC => MakesSequentialStream[String, Stream[String]]) = + streamize(cc).seqStream + } + + object Pstream { + def i[CC](cc: CC)(implicit streamize: CC => MakesParallelStream[Int, IntStream]) = + streamize(cc).parStream + def s[CC](cc: CC)(implicit streamize: CC => MakesParallelStream[String, Stream[String]]) = + streamize(cc).parStream + } + + trait GenThingsOf[CC[_]] { + def title: String + def sizes: Array[Int] + } + + trait IntThingsOf[CC[_]] extends GenThingsOf[CC] { + implicit def myCBFi: CanBuildFrom[Nothing, Int, CC[Int]] + // Base collection + val cI = sizes.map(n => Coll.i[CC](n)) + // Iterator + def iI(j: Int)(implicit x: CC[Int] => Iterator[Int]) = x(cI(j)) + // Steppers (second letter--s = sequential, p = parallel) + def tsI(j: Int)(implicit x: CC[Int] => MakesStepper[Int, Any]) = Sstep i cI(j) + def tpI(j: Int)(implicit x: CC[Int] => MakesStepper[Int, EfficientSubstep]) = Pstep i cI(j) + // Streams + def ssI(j: Int)(implicit x: CC[Int] => MakesSequentialStream[Int, IntStream]) = Sstream i cI(j) + def spI(j: Int)(implicit x: CC[Int] => MakesParallelStream[Int, IntStream]) = Pstream i cI(j) + // Streams via steppers + def zsI(j: Int)(implicit x: CC[Int] => MakesStepper[Int, Any]) = SsStream i cI(j) + def zpI(j: Int)(implicit x: CC[Int] => MakesStepper[Int, EfficientSubstep]) = PsStream i cI(j) + } + + trait StringThingsOf[CC[_]] extends GenThingsOf[CC] { + 
implicit def myCBFs: CanBuildFrom[Nothing, String, CC[String]] + // Base collection + val cS = sizes.map(n => Coll.s[CC](n)) + // Iterator + def iS(j: Int)(implicit x: CC[String] => Iterator[String]) = x(cS(j)) + // Steppers (second letter--s = sequential, p = parallel) + def tsS(j: Int)(implicit x: CC[String] => MakesStepper[String, Any]) = Sstep s cS(j) + def tpS(j: Int)(implicit x: CC[String] => MakesStepper[String, EfficientSubstep]) = Pstep s cS(j) + // Streams + def ssS(j: Int)(implicit x: CC[String] => MakesSequentialStream[String, Stream[String]]) = Sstream s cS(j) + def spS(j: Int)(implicit x: CC[String] => MakesParallelStream[String, Stream[String]]) = Pstream s cS(j) + // Streams via steppers + def zsS(j: Int)(implicit x: CC[String] => MakesStepper[String, Any]) = SsStream s cS(j) + def zpS(j: Int)(implicit x: CC[String] => MakesStepper[String, EfficientSubstep]) = PsStream s cS(j) + } + + trait ThingsOf[CC[_]] extends IntThingsOf[CC] with StringThingsOf[CC] {} + + abstract class AbstractThings[CC[_]](val title: String)( + implicit + outerCBFi: CanBuildFrom[Nothing, Int, CC[Int]], + outerCBFs: CanBuildFrom[Nothing, String, CC[String]] + ) + extends ThingsOf[CC] { + implicit def myCBFi = outerCBFi + implicit def myCBFs = outerCBFs + } + + // Java collection CBFs + + implicit val javaUtilArrayListIntCanBuildFrom = new CanBuildFrom[Nothing, Int, java.util.ArrayList[Int]] { + def apply(from: Nothing): collection.mutable.Builder[Int, java.util.ArrayList[Int]] = apply() + def apply(): collection.mutable.Builder[Int, java.util.ArrayList[Int]] = new collection.mutable.Builder[Int, java.util.ArrayList[Int]] { + private var myAL = new java.util.ArrayList[Int] + def clear() = { myAL = new java.util.ArrayList[Int]; () } + def result() = { val ans = myAL; clear(); ans } + def +=(x: Int) = { myAL add x; this } + } + } + implicit val javaUtilArrayListStringCanBuildFrom = new CanBuildFrom[Nothing, String, java.util.ArrayList[String]] { + def apply(from: Nothing): 
collection.mutable.Builder[String, java.util.ArrayList[String]] = apply() + def apply(): collection.mutable.Builder[String, java.util.ArrayList[String]] = new collection.mutable.Builder[String, java.util.ArrayList[String]] { + private var myAL = new java.util.ArrayList[String] + def clear() = { myAL = new java.util.ArrayList[String]; () } + def result() = { val ans = myAL; clear(); ans } + def +=(x: String) = { myAL add x; this } + } + } + implicit val javaUtilLinkedListIntCanBuildFrom = new CanBuildFrom[Nothing, Int, java.util.LinkedList[Int]] { + def apply(from: Nothing): collection.mutable.Builder[Int, java.util.LinkedList[Int]] = apply() + def apply(): collection.mutable.Builder[Int, java.util.LinkedList[Int]] = new collection.mutable.Builder[Int, java.util.LinkedList[Int]] { + private var myLL = new java.util.LinkedList[Int] + def clear() = { myLL = new java.util.LinkedList[Int]; () } + def result() = { val ans = myLL; clear(); ans } + def +=(x: Int) = { myLL add x; this } + } + } + implicit val javaUtilLinkedListStringCanBuildFrom = new CanBuildFrom[Nothing, String, java.util.LinkedList[String]] { + def apply(from: Nothing): collection.mutable.Builder[String, java.util.LinkedList[String]] = apply() + def apply(): collection.mutable.Builder[String, java.util.LinkedList[String]] = new collection.mutable.Builder[String, java.util.LinkedList[String]] { + private var myLL = new java.util.LinkedList[String] + def clear() = { myLL = new java.util.LinkedList[String]; () } + def result() = { val ans = myLL; clear(); ans } + def +=(x: String) = { myLL add x; this } + } + } + + // Streams from ArrayList (Java) + + implicit val getsParStreamFromArrayListInt: (java.util.ArrayList[Int] => MakesParallelStream[Int, IntStream]) = ali => { + new MakesParallelStream[Int, IntStream] { + def parStream: IntStream = ali. + asInstanceOf[java.util.ArrayList[java.lang.Integer]]. + parallelStream.parallel. 
+ mapToInt(new java.util.function.ToIntFunction[java.lang.Integer]{ def applyAsInt(i: java.lang.Integer) = i.intValue }) + } + } + implicit val getsSeqStreamFromArrayListInt: (java.util.ArrayList[Int] => MakesSequentialStream[Int, IntStream]) = ali => { + new MakesSequentialStream[Int, IntStream] { + def seqStream: IntStream = ali. + asInstanceOf[java.util.ArrayList[java.lang.Integer]]. + stream(). + mapToInt(new java.util.function.ToIntFunction[java.lang.Integer]{ def applyAsInt(i: java.lang.Integer) = i.intValue }) + } + } + implicit val getsParStreamFromArrayListString: (java.util.ArrayList[String] => MakesParallelStream[String, Stream[String]]) = als => { + new MakesParallelStream[String, Stream[String]] { + def parStream: Stream[String] = als.parallelStream.parallel + } + } + implicit val getsSeqStreamFromArrayListString: (java.util.ArrayList[String] => MakesSequentialStream[String, Stream[String]]) = als => { + new MakesSequentialStream[String, Stream[String]] { + def seqStream: Stream[String] = als.stream + } + } + + // Streams from LinkedList (Java) + + implicit val getsParStreamFromLinkedListInt: (java.util.LinkedList[Int] => MakesParallelStream[Int, IntStream]) = ali => { + new MakesParallelStream[Int, IntStream] { + def parStream: IntStream = ali. + asInstanceOf[java.util.LinkedList[java.lang.Integer]]. + parallelStream.parallel. + mapToInt(new java.util.function.ToIntFunction[java.lang.Integer]{ def applyAsInt(i: java.lang.Integer) = i.intValue }) + } + } + implicit val getsSeqStreamFromLinkedListInt: (java.util.LinkedList[Int] => MakesSequentialStream[Int, IntStream]) = ali => { + new MakesSequentialStream[Int, IntStream] { + def seqStream: IntStream = ali. + asInstanceOf[java.util.LinkedList[java.lang.Integer]]. + stream(). 
+ mapToInt(new java.util.function.ToIntFunction[java.lang.Integer]{ def applyAsInt(i: java.lang.Integer) = i.intValue }) + } + } + implicit val getsParStreamFromLinkedListString: (java.util.LinkedList[String] => MakesParallelStream[String, Stream[String]]) = als => { + new MakesParallelStream[String, Stream[String]] { + def parStream: Stream[String] = als.parallelStream.parallel + } + } + implicit val getsSeqStreamFromLinkedListString: (java.util.LinkedList[String] => MakesSequentialStream[String, Stream[String]]) = als => { + new MakesSequentialStream[String, Stream[String]] { + def seqStream: Stream[String] = als.stream + } + } + + object EnableIterators { + implicit val iterableIntToIterator: (Iterable[Int] => Iterator[Int]) = _.iterator + implicit val iterableStringToIterator: (Iterable[String] => Iterator[String]) = _.iterator + implicit val arrayIntToIterator: (Array[Int] => Iterator[Int]) = (a: Array[Int]) => new Iterator[Int] { + private[this] var i = 0 + def hasNext = i < a.length + def next = if (hasNext) { var ans = a(i); i += 1; ans } else throw new NoSuchElementException(i.toString) + } + implicit val arrayStringToIterator: (Array[String] => Iterator[String]) = _.iterator + } + + class ArrThings(val sizes: Array[Int]) extends AbstractThings[Array]("Array") {} + + class IshThings(val sizes: Array[Int]) extends AbstractThings[collection.immutable.HashSet]("immutable.HashSet") {} + + class LstThings(val sizes: Array[Int]) extends AbstractThings[List]("List") {} + + class IlsThings(val sizes: Array[Int]) extends AbstractThings[collection.immutable.ListSet]("immutable.ListSet") {} + + class QueThings(val sizes: Array[Int]) extends AbstractThings[collection.immutable.Queue]("immutable.Queue") {} + + class StmThings(val sizes: Array[Int]) extends AbstractThings[collection.immutable.Stream]("immutable.Stream") {} + + class TrsThings(val sizes: Array[Int]) extends AbstractThings[collection.immutable.TreeSet]("immutable.TreeSet") {} + + class VecThings(val 
sizes: Array[Int]) extends AbstractThings[Vector]("Vector") {} + + class ArbThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.ArrayBuffer]("mutable.ArrayBuffer") {} + + class ArsThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.ArraySeq]("mutable.ArraySeq") {} + + class AstThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.ArrayStack]("mutable.ArrayStack") {} + + class MhsThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.HashSet]("mutable.HashSet") {} + + class LhsThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.LinkedHashSet]("mutable.LinkedHashSet") {} + + class PrqThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.PriorityQueue]("mutable.PriorityQueue") {} + + class MuqThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.Queue]("mutable.Queue") {} + + class WraThings(val sizes: Array[Int]) extends AbstractThings[collection.mutable.WrappedArray]("mutable.WrappedArray") {} + + class JixThings(val sizes: Array[Int]) extends AbstractThings[java.util.ArrayList]("java.util.ArrayList") {} + + class JlnThings(val sizes: Array[Int]) extends AbstractThings[java.util.LinkedList]("java.util.LinkedList") {} + + class Things(sizes: Array[Int] = Array(0, 1, 2, 5, 7, 15, 16, 32, 33, 64, 129, 256, 1023, 2914, 7151/*, 20000, 50000, 200000*/)) { + def N = sizes.length + lazy val arr = new ArrThings(sizes) + lazy val ish = new IshThings(sizes) + lazy val lst = new LstThings(sizes) + lazy val ils = new IlsThings(sizes) + lazy val que = new QueThings(sizes) + lazy val stm = new StmThings(sizes) + lazy val trs = new TrsThings(sizes) + lazy val vec = new VecThings(sizes) + lazy val arb = new ArbThings(sizes) + lazy val ars = new ArsThings(sizes) + lazy val ast = new AstThings(sizes) + lazy val mhs = new MhsThings(sizes) + lazy val lhs = new LhsThings(sizes) + lazy val prq = new PrqThings(sizes) + lazy val muq = new MuqThings(sizes) + lazy 
val wra = new WraThings(sizes) + lazy val jix = new JixThings(sizes) + lazy val jln = new JlnThings(sizes) + } +} diff --git a/benchmark/src/main/scala/bench/Operations.scala b/benchmark/src/main/scala/bench/Operations.scala new file mode 100644 index 0000000..0a13126 --- /dev/null +++ b/benchmark/src/main/scala/bench/Operations.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package bench.operate + +import java.util.stream._ +import java.util.{function => jf} + +import scala.collection.parallel.ParIterable + +import scala.compat.java8.StreamConverters._ +import scala.compat.java8.converterImpl._ +import scala.compat.java8.collectionImpl._ + +object CloseEnough { + import scala.math._ + def apply[A](a: A, b: => A): Boolean = a match { + case da: Double => b match { + case db: Double => (da.isNaN && db.isNaN) || abs(da - db) <= max(1, max(abs(da), abs(db)))*1e-6 + case x => a == x + } + case _ => a == b + } +} + +object OnInt { + def expensive(i: Int) = { var v = i.toDouble; var j = 0; while (j < 10) { v = math.exp(math.sin(v)); j += 1 }; v+j } + + def sum(a: Array[Int]): Int = { var s,i = 0; while (i < a.length) { s += a(i); i += 1 }; s } + def sum(t: Traversable[Int]): Int = t.sum + def sum(i: Iterator[Int]): Int = i.sum + def sum(s: IntStepper): Int = s.fold(0)(_ + _) + def sum(s: IntStream): Int = { + s.sum + /*var r = 0 + val it = s.iterator() + while(it.hasNext) r += it.nextInt() + r*/ + } + def psum(i: ParIterable[Int]): Int = i.sum + def psum(s: IntStream): Int = s.sum + + def trig(a: Array[Int]): Double = { var i = 0; var s = 0.0; while (i < a.length) { s += expensive(a(i)); i += 1 }; s } + def trig(t: Traversable[Int]): Double = t.map(expensive).sum + def trig(i: Iterator[Int]): 
Double = i.map(expensive).sum + def trig(s: IntStepper): Double = s.fold(0.0)((x,i) => x + expensive(i)) + def trig(s: IntStream): Double = s.mapToDouble(new jf.IntToDoubleFunction{ def applyAsDouble(i: Int) = expensive(i) }).sum + def ptrig(i: ParIterable[Int]): Double = i.map(expensive).sum + def ptrig(s: IntStream): Double = trig(s) + + def fmc(a: Array[Int]): Int = { var s,i = 0; while (i < a.length) { if (i%7 == 1) s += (i/7)*i; i += 1 }; s } + def fmc(t: Traversable[Int]): Int = t.filter(x => (x%7) == 1).map(x => (x/7)*x).sum + def fmc(i: Iterator[Int]): Int = i.filter(x => (x%7) == 1).map(x => (x/7)*x).sum + def fmc(s: IntStream): Int = s. + filter(new jf.IntPredicate { def test(x: Int) = (x%7) == 1 }). + map(new jf.IntUnaryOperator{ def applyAsInt(x: Int) = (x/7)*x }). + sum + def pfmc(i: ParIterable[Int]): Int = i.filter(x => (x%7) == 1).map(x => (x/7)*x).sum + def pfmc(s: IntStream): Int = fmc(s) + + def mdtc(a: Array[Int]): Int = { var i = 1; while(i < a.length) { if ((a(i) << 1) >= 42) return i-1; i += 1 }; i - 1 } + def mdtc(t: Traversable[Int]): Int = t.map(_ << 1).drop(1).takeWhile(_ < 42).size + def mdtc(i: Iterator[Int]): Int = i.map(_ << 1).drop(1).takeWhile(_ < 42).size + def mdtc(s: IntStream): Int = { + val temp = s.map(new jf.IntUnaryOperator { def applyAsInt(x: Int) = x << 1 }).skip(1) + val acc = new IntAccumulator + temp.allMatch(new jf.IntPredicate{ def test(x: Int) = if (x < 42) { acc += x; true } else false }) + acc.size.toInt + } +} + +object OnString { + def expensive(s: String) = { val h = scala.util.hashing.MurmurHash3.stringHash(s); OnInt.expensive(h) } + + def nbr(a: Array[String]): Int = { var s,i = 0; while (i < a.length) { if (a(i).charAt(a(i).length-1) < '5') s += 1; i += 1 }; s } + def nbr(t: Traversable[String]): Int = t.count(s => s.charAt(s.length-1) < '5') + def nbr(i: Iterator[String]): Int = i.count(s => s.charAt(s.length-1) < '5') + def nbr(p: Stepper[String]): Int = p.fold(0)((i,s) => if (s.charAt(s.length-1) < '5') 
i+1 else i) + def nbr(q: Stream[String]): Int = q.filter(new jf.Predicate[String] { def test(s: String) = s.charAt(s.length-1) < '5' }).count.toInt + def pnbr(i: ParIterable[String]): Int = i.count(s => s.charAt(s.length-1) < '5') + def pnbr(q: Stream[String]): Int = nbr(q) + + def htrg(a: Array[String]): Double = { var s = 0.0; var i = 0; while (i < a.length) { s += expensive(a(i)); i += 1 }; s } + def htrg(t: Traversable[String]): Double = t.map(expensive).sum + def htrg(i: Iterator[String]): Double = i.map(expensive).sum + def htrg(p: Stepper[String]): Double = p.fold(0.0)((x,s) => x + expensive(s)) + def htrg(q: Stream[String]): Double = q.mapToDouble(new jf.ToDoubleFunction[String]{ def applyAsDouble(s: String) = expensive(s) }).sum + def phtrg(i: ParIterable[String]): Double = i.map(expensive).sum + def phtrg(q: Stream[String]): Double = htrg(q) + + def fmc(a: Array[String]): Int = { + var s, i = 0 + while (i < a.length) { + val x = a(i) + if (x.charAt(x.length-1) == '1' && (x.length > 2 || (x.charAt(0) != '-' && x.length > 1))) s += 1 + i += 1 + } + s + } + def fmc(t: Traversable[String]): Int = + t.filter(x => x.charAt(x.length-1) == '1').map(x => if (x.charAt(0) == '-') x.substring(1) else x).count(_.length > 1) + def fmc(i: Iterator[String]): Int = + i.filter(x => x.charAt(x.length-1) == '1').map(x => if (x.charAt(0) == '-') x.substring(1) else x).count(_.length > 1) + def fmc(q: Stream[String]): Int = + q.filter(new jf.Predicate[String]{ def test(x: String) = x.charAt(x.length-1) == '1' }). + map[String](new jf.Function[String, String]{ def apply(x: String) = if (x.charAt(0) == '-') x.substring(1) else x }). + filter(new jf.Predicate[String]{ def test(x: String) = x.length > 1 }). 
+ count.toInt + def pfmc(i: ParIterable[String]): Int = + i.filter(x => x.charAt(x.length-1) == '1').map(x => if (x.charAt(0) == '-') x.substring(1) else x).count(_.length > 1) + def pfmc(q: Stream[String]): Int = fmc(q) + + def mdtc(a: Array[String]): Int = { + var i = 1 + while (i < a.length) { + if (a(i).reverse.length >= 3) return i-1 + i += 1 + } + i-1 + } + def mdtc(t: Traversable[String]): Int = t.map(_.reverse).drop(1).takeWhile(_.length < 3).size + def mdtc(i: Iterator[String]): Int = i.map(_.reverse).drop(1).takeWhile(_.length < 3).size + def mdtc(q: Stream[String]): Int = { + val temp = q.map[String](new jf.UnaryOperator[String] { def apply(x: String) = x.reverse }).skip(1) + val acc = new Accumulator[String] + temp.allMatch(new jf.Predicate[String]{ def test(x: String) = if (x.length < 3) { acc += x; true } else false }) + acc.size.toInt + } +} diff --git a/benchmark/src/main/scala/bench/ParseJmhLog.scala b/benchmark/src/main/scala/bench/ParseJmhLog.scala new file mode 100644 index 0000000..0263c45 --- /dev/null +++ b/benchmark/src/main/scala/bench/ParseJmhLog.scala @@ -0,0 +1,160 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package bench.examine + +import scala.util._ +import scala.util.control.NonFatal + +object ParseJmhLog { + // Manual path to the log file (which may have been put there manually) + val logLocation = "results/jmhbench.log" + + // Warning--this is maintained manually! Please take care to keep it up to date. 
+ val collectionNames = Map( + "arb" -> "collection.mutable.ArrayBuffer", + "arr" -> "Array", + "ars" -> "collection.mutable.ArraySeq", + "ast" -> "collection.mutable.ArrayStack", + "ils" -> "collection.immutable.ListSet", + "ish" -> "collection.immutable.HashSet", + "lhs" -> "collection.mutable.LinkedHashSet", + "lst" -> "collection.immutable.List", + "mhs" -> "collection.mutable.HashSet", + "muq" -> "collection.mutable.Queue", + "prq" -> "collection.mutable.PriorityQueue", + "que" -> "collection.immutable.Queue", + "stm" -> "collection.immutable.Stream", + "trs" -> "collection.immutable.TreeSet", + "vec" -> "collection.immutable.Vector", + "wra" -> "collection.mutable.WrappedArray", + "jix" -> "java.util.ArrayList", + "jln" -> "java.util.LinkedList" + ) + + // Warning--this is maintained manually! Please take care to keep it up to date. + val dataNames = Map( + "cI" -> "Int, base collection", + "cpI" -> "Int, par collection", + "iI" -> "Int, iterator on coll", + "ssI" -> "Int, serial stream", + "spI" -> "Int, parallel stream", + "tpI" -> "Int Stepper (can par)", + "tsI" -> "Int Stepper (seq only)", + "cS" -> "String, base collection", + "cpS" -> "String, par collection", + "iS" -> "String, iterator on coll", + "ssS" -> "String, serial stream", + "spS" -> "String, parallel stream", + "tpS" -> "String Stepper (can par)", + "tsS" -> "String Stepper (seq only)" + ) + val dataColumnWidth = dataNames.map(_._2).map(_.length).max + 1 + + // Warning--this is maintained manually! Please take care to keep it up to date. 
+ val operationNames = Map( + "sum" -> "fast summation", + "psum" -> "fast parallel summation", + "nbr" -> "fast sum of lengths", + "pnbr" -> "fast parallel sum of lengths", + "trig" -> "slow trigonometry", + "ptrig" -> "slow parallel trigonometry", + "htrg" -> "slow trig on hashCode", + "phtrg" -> "slow parallel trig on hashCode", + "fmc" -> "filter/map/sum trio", + "pfmc" -> "parallel filter/map/sum trio", + "mdtc" -> "map/filter/take trio" + ) + + private def ensure(satisfied: Boolean)(not: => Unit): Unit = { + if (!satisfied) { + not + assert(satisfied) + } + } + + private def indicate(cols: Int, value: Double): String = { + val one = math.min((3*cols)/4, cols-5) + val me = math.rint(one*3*value).toInt + if (me <= 3*(cols-1)) { + val filled = + if ((me%3) != 0) "#"*(me/3) + (if ((me%3) == 1) "-" else "=") + else "#"*(me/3) + filled + " "*(cols - filled.length) + } + else "#"*(cols-4) + "~~# " + } + + case class Entry(op: String, coll: String, big: Boolean, data: String, speed: Double, errbar: Double) { + ensure(collectionNames contains coll){ println(coll) } + ensure(dataNames contains data){ println(data) } + ensure(operationNames contains op){ println(op) } + } + + def apply(f: java.io.File = new java.io.File(logLocation)) = { + val lines = { + val s = scala.io.Source.fromFile(f) + try { s.getLines().toVector } finally { s.close } + } + + val relevant = lines. + dropWhile(x => !(x contains "Run complete.")). + dropWhile(x => !(x contains "JmhBench.bench_")). + takeWhile(x => x contains "JmhBench.bench_"). + map{ x => + val ys = x.split("\\s+") + ys(1).split('_').drop(1) match { + case Array(dat, op, coll, n) => Entry (op, coll, n == "1", dat, ys(4).toDouble, ys(6).toDouble) + case _ => throw new Exception("Bad benchmark log line, please investigate manually.") + } + }. + toArray + + val normalized = relevant. + groupBy(e => (e.op, e.big, e.data.takeRight(1))). 
+ mapValues{ vs => + val one = vs.find(e => e.coll == "arr" && e.data.startsWith("c")).get.speed + vs.map(v => v.copy(speed = v.speed/one, errbar = 100 * v.errbar/v.speed)) // Error bar in percent error from mean + }. + map(_._2). + toArray. + sortBy(_(0) match { case Entry(o, _, b, d, _, _) => (o, d.takeRight(1), b) }). + map(_.sortBy{ case Entry(_, c, _, d, _, _) => (c, d.dropRight(1)) }) + + normalized.foreach{ es => + println + println(">"*79) + println + var remaining = es.toList + while (remaining.nonEmpty) { + val (esa, esb) = remaining.span(_.coll == remaining.head.coll) + remaining = esb + println(operationNames(esa.head.op)) + println(if (esa.head.big) "10000 elements" else "10 elements") + println(collectionNames(esa.head.coll)) + esa.foreach{ e => + println( + s" %-${dataColumnWidth}s ".format(dataNames(e.data)) + + indicate(79 - (dataColumnWidth+3) - 16, e.speed) + + "%5.3f +- %5.1f %%".format(e.speed, e.errbar) + ) + } + if (remaining.nonEmpty) println + } + } + println + println("-"*79) + println + } + + def main(args: Array[String]): Unit = apply() +} diff --git a/build.sbt b/build.sbt index 0defedc..d6aa6cf 100644 --- a/build.sbt +++ b/build.sbt @@ -1,101 +1,152 @@ -import com.typesafe.tools.mima.plugin.{MimaPlugin, MimaKeys} +val disableDocs = + sys.props("nodocs") == "true" || + // on jdk 11 https://github.com/scala/scala-java8-compat/issues/160, seems to fail the build (not on 8) + !sys.props("java.version").startsWith("1.") -scalaModuleSettings - -scalaVersion := "2.11.5" - -snapshotScalaBinaryVersion := "2.11.5" - -organization := "org.scala-lang.modules" - -name := "scala-java8-compat" - -version := "0.3.0-SNAPSHOT" - -// important!! must come here (why?) 
-scalaModuleOsgiSettings - -OsgiKeys.exportPackage := Seq(s"scala.compat.java8.*;version=${version.value}") - -OsgiKeys.privatePackage := List("scala.concurrent.java8.*") - -libraryDependencies += "junit" % "junit" % "4.11" % "test" - -libraryDependencies += "com.novocode" % "junit-interface" % "0.10" % "test" - -MimaPlugin.mimaDefaultSettings - -MimaKeys.previousArtifact := None - -// run mima during tests -test in Test := { - MimaKeys.reportBinaryIssues.value - (test in Test).value -} - -testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a") - -sourceGenerators in Compile <+= sourceManaged in Compile map { dir => - def write(name: String, content: String) = { - val f = dir / "scala" / "compat" / "java8" / s"${name}.java" - IO.write(f, content) - f - } - ( - Seq(write("JFunction", CodeGen.factory)) ++ - (0 to 22).map(n => write("JFunction" + n, CodeGen.fN(n))) ++ - (0 to 22).map(n => write("JProcedure" + n, CodeGen.pN(n))) ++ - CodeGen.specializedF0.map((write _).tupled) ++ - CodeGen.specializedF1.map((write _).tupled) ++ - CodeGen.specializedF2.map((write _).tupled) - ) -} +lazy val JavaDoc = config("genjavadoc") extend Compile -sourceGenerators in Test <+= sourceManaged in Test map { dir => - def write(name: String, content: String) = { - val f = dir / "scala" / "compat" / "java8" / s"${name}.java" - IO.write(f, content) - f - } - Seq(write("TestApi", CodeGen.testApi)) +def jwrite(dir: java.io.File, pck: String = "scala/compat/java8")(name: String, content: String) = { + val f = dir / pck / s"${name}.java" + IO.write(f, content) + f } -initialize := { - // Run previously configured inialization... - initialize.value - // ... and then check the Java version. 
- val specVersion = sys.props("java.specification.version") - if (Set("1.5", "1.6", "1.7") contains specVersion) - sys.error("Java 8 or higher is required for this project.") +def osgiExport(scalaVersion: String, version: String) = { + (CrossVersion.partialVersion(scalaVersion) match { + case Some((2, 11)) => Seq(s"scala.runtime.java8.*;version=${version}") + case _ => Nil + }) ++ Seq(s"scala.compat.java8.*;version=${version}") } -val disableDocs = sys.props("nodocs") == "true" - -publishArtifact in packageDoc := !disableDocs - -lazy val JavaDoc = config("genjavadoc") extend Compile - -sources in (Compile, doc) := { - val orig = (sources in (Compile, doc)).value - orig.filterNot(_.getName.endsWith(".java")) // raw types not cooked by scaladoc: https://issues.scala-lang.org/browse/SI-8449 -} - -inConfig(JavaDoc)(Defaults.configSettings) ++ (if (disableDocs) Nil else Seq( - packageDoc in Compile <<= packageDoc in JavaDoc, - sources in JavaDoc <<= (target, compile in Compile, sources in Compile) map {(t, c, s) => - val allJavaSources = (t / "java" ** "*.java").get ++ s.filter(_.getName.endsWith(".java")) - allJavaSources.filterNot(_.getName.contains("FuturesConvertersImpl.java")) // this file triggers bugs in genjavadoc +lazy val commonSettings = Seq( + crossScalaVersions := Seq("2.13.16", "2.12.20", "2.11.12", "3.3.6"), + scalaVersion := crossScalaVersions.value.head, + // we could make this stricter again (BinaryAndSourceCompatible) + // after our reference version was built on Scala 3.1.x + versionPolicyIntention := Compatibility.BinaryCompatible, + Compile / unmanagedSourceDirectories ++= { + (Compile / unmanagedSourceDirectories).value.flatMap { dir => + CrossVersion.partialVersion(scalaVersion.value) match { + case Some((2, 11)) => Seq(file(dir.getPath ++ "-2.13-"), file(dir.getPath ++ "-2.11")) + case Some((2, 12)) => Seq(file(dir.getPath ++ "-2.13-")) + case _ => Seq(file(dir.getPath ++ "-2.13+")) + } + } }, - javacOptions in JavaDoc := Seq(), - artifactName 
in packageDoc in JavaDoc := ((sv, mod, art) => "" + mod.name + "_" + sv.binary + "-" + mod.revision + "-javadoc.jar"), - libraryDependencies += compilerPlugin("com.typesafe.genjavadoc" % "genjavadoc-plugin" % "0.8" cross CrossVersion.full), - scalacOptions in Compile <+= target map (t => "-P:genjavadoc:out=" + (t / "java")) -)) - -initialCommands := -"""|import scala.concurrent._ - |import ExecutionContext.Implicits.global - |import java.util.concurrent.{CompletionStage,CompletableFuture} - |import scala.compat.java8.FutureConverter._ - |""".stripMargin - + Test / unmanagedSourceDirectories ++= { + (Test / unmanagedSourceDirectories).value.flatMap { dir => + CrossVersion.partialVersion(scalaVersion.value) match { + case Some((2, 11)) => Seq(file(dir.getPath ++ "-2.13-"), file(dir.getPath ++ "-2.11")) + case Some((2, 12)) => Seq(file(dir.getPath ++ "-2.13-")) + case _ => Seq(file(dir.getPath ++ "-2.13+")) + } + } + }, +) + +lazy val scalaJava8Compat = (project in file(".")) + .settings(ScalaModulePlugin.scalaModuleSettings) + .enablePlugins(SbtOsgi) + .settings(ScalaModulePlugin.scalaModuleOsgiSettings) + .settings(commonSettings) + .settings( + name := "scala-java8-compat", + scalaModuleAutomaticModuleName := Some("scala.compat.java8"), + ) + .settings( + OsgiKeys.exportPackage := osgiExport(scalaVersion.value, version.value), + + OsgiKeys.privatePackage := List("scala.concurrent.java8.*"), + + libraryDependencies += "junit" % "junit" % "4.13.2" % "test", + + libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.12.0" % "test", + + libraryDependencies += "com.github.sbt" % "junit-interface" % "0.13.3" % "test", + + // see https://github.com/scala/scala-java8-compat/issues/247 + versionPolicyPreviousVersions := versionPolicyPreviousVersions.value.flatMap { + case VersionNumber(Seq(0, _*), _, _) => Nil + case VersionNumber(Seq(1, 0, n, _*), _, _) if n <= 1 => Nil + case v => Seq(v) + }, + + // shouldn't be needed anymore after our reference version is a 
version + // built on Scala 3.1.x + mimaBinaryIssueFilters := { + import com.typesafe.tools.mima.core.ProblemFilters._ + import com.typesafe.tools.mima.core._ + Seq( + exclude[IncompatibleSignatureProblem]("scala.compat.java8.*"), + exclude[IncompatibleSignatureProblem]("scala.concurrent.java8.*"), + ), + }, + + testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), + + (Compile / sourceGenerators) += Def.task { + val f = (Compile / sourceManaged).value / "FunctionConverters.scala" + IO.write(f, WrapFnGen.code) + Seq(f) + }.taskValue, + + Compile / sourceGenerators += Def.task { + val dir = (Compile / sourceManaged).value + val write = jwrite(dir) _ + if(scalaVersion.value.startsWith("2.11.")) { + Seq(write("JFunction", CodeGen.factory)) ++ + (0 to 22).map(n => write("JFunction" + n, CodeGen.fN(n))) ++ + (0 to 22).map(n => write("JProcedure" + n, CodeGen.pN(n))) ++ + CodeGen.specializedF0.map(write.tupled) ++ + CodeGen.specializedF1.map(write.tupled) ++ + CodeGen.specializedF2.map(write.tupled) ++ + CodeGen.packageDummy.map((jwrite(dir, "java/runtime/java8") _).tupled) + } else CodeGen.create212.map(write.tupled) + }.taskValue, + + Test / sourceGenerators += Def.task { + Seq(jwrite((Test / sourceManaged).value)("TestApi", CodeGen.testApi)) + }.taskValue, + + initialize := { + // Run previously configured inialization... + val _ = initialize.value + // ... and then check the Java version. 
+ val specVersion = sys.props("java.specification.version") + if (Set("1.5", "1.6", "1.7") contains specVersion) + sys.error("Java 8 or higher is required for this project.") + }, + ) + .settings( + inConfig(JavaDoc)(Defaults.configSettings) ++ { + if (disableDocs) Nil + else Seq( + Compile / packageDoc := (JavaDoc / packageDoc).value, + JavaDoc / sources := { + val allJavaSources = + (target.value / "java" ** "*.java").get ++ + (Compile / sources).value.filter(_.getName.endsWith(".java")) + allJavaSources.filterNot(_.getName.contains("FuturesConvertersImpl.java")) // this file triggers bugs in genjavadoc + }, + JavaDoc / javacOptions := Seq("-Xdoclint:none"), + JavaDoc / packageDoc / artifactName := ((sv, mod, art) => "" + mod.name + "_" + sv.binary + "-" + mod.revision + "-javadoc.jar"), + libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { + case Some((3, _)) => Seq() + case Some((2, 11)) => Seq(compilerPlugin("com.typesafe.genjavadoc" % "genjavadoc-plugin" % "0.18" cross CrossVersion.full)) + case _ => Seq(compilerPlugin("com.typesafe.genjavadoc" % "genjavadoc-plugin" % "0.19" cross CrossVersion.full)) + }), + Compile / scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match { + case Some((3, _)) => Seq() + case _ => Seq(s"""-P:genjavadoc:out=${target.value / "java"}""") + }), + ) + } + ) + .settings( + initialCommands := + """|import scala.concurrent._ + |import ExecutionContext.Implicits.global + |import java.util.concurrent.{CompletionStage,CompletableFuture} + |import scala.compat.java8.FutureConverters._ + |""".stripMargin + ) diff --git a/project/CodeGen.scala b/project/CodeGen.scala index 0e0234c..bb3a37e 100644 --- a/project/CodeGen.scala +++ b/project/CodeGen.scala @@ -1,5 +1,13 @@ /* - * Copyright (C) 2012-2014 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ sealed abstract class Type(val code: Char, val prim: String, val ref: String) { @@ -40,7 +48,7 @@ object CodeGen { |$packaging | |@FunctionalInterface - |public interface JFunction0 extends scala.Function0 { + |public interface JFunction0 extends scala.Function0, java.io.Serializable { | default void $initName() { | }; |""".stripMargin @@ -51,7 +59,7 @@ object CodeGen { |$packaging | |@FunctionalInterface - |public interface JFunction1 extends scala.Function1 { + |public interface JFunction1 extends scala.Function1, java.io.Serializable { | default void $initName() { | }; | @@ -77,7 +85,7 @@ object CodeGen { |$packaging | |@FunctionalInterface - |public interface JFunction$n<$tparams, R> extends scala.Function$n<$tparams, R> { + |public interface JFunction$n<$tparams, R> extends scala.Function$n<$tparams, R>, java.io.Serializable { | default void $initName() { | }; | @@ -288,7 +296,7 @@ object CodeGen { private val copyright = """ |/* - | * Copyright (C) 2012-2015 Typesafe Inc. + | * Copyright (C) Lightbend Inc. | */""".stripMargin.trim private def function0SpecMethods = { @@ -331,26 +339,9 @@ object CodeGen { |} |""".stripMargin.trim } - // andThen / compose variants are no longer needed under 2.11 (@unspecialized has been fixed), - // but harmless. 
With them, we can use the same artifact for 2.10 and 2.11 - val compose = specialized("compose", function1Spec) { - case (name, List(t1, r1)) => - s""" - |default scala.Function1 $name(scala.Function1 g) { - | return compose(g); - |}""".stripMargin.trim - } - val andThen = specialized("andThen", function1Spec) { - case (name, List(t1, r1)) => - s""" - |default scala.Function1 $name(scala.Function1 g) { - | return andThen(g); - |}""".stripMargin.trim - } - indent(List(apply, compose, andThen).mkString("\n\n")) + indent(List(apply).mkString("\n\n")) } - // No longer needed under 2.11 (@unspecialized has been fixed), but harmless to keep around to avoid cross-publishing this artifact. private def function2SpecMethods = { val apply = specialized("apply", function2Spec) { case (name, List(t1, t2, r)) => @@ -363,21 +354,7 @@ object CodeGen { |} |""".stripMargin.trim } - val curried = specialized("curried", function2Spec) { - case (name, List(t1, t2, r)) => - s""" - |default scala.Function1 $name() { - | return curried(); - |}""".stripMargin.trim - } - val tupled = specialized("tupled", function2Spec) { - case (name, List(t1, t2, r)) => - s""" - |default scala.Function1 $name() { - | return tupled(); - |}""".stripMargin.trim - } - indent(List(apply, curried, tupled).mkString("\n\n")) + indent(List(apply).mkString("\n\n")) } def specializedSuffix(tparamNames: List[String], tps: List[Type]): String = { @@ -464,4 +441,101 @@ object CodeGen { } def indent(s: String) = s.linesIterator.map(" " + _).mkString("\n") + + /** Create a dummy class to put into scala.runtime.java8 for Scala 2.11 so that wildcard imports from the + * package won't fail. This allows importing both `scala.runtime.java8.*` and `scala.compat.java8.*` for + * source compatibility between 2.11 and 2.12. 
+ */ + def packageDummy: Seq[(String, String)] = Seq( + ( "PackageDummy", + s"""$copyright + | + |package scala.runtime.java8; + | + |public final class PackageDummy { + | private PackageDummy() {} + |} + """.stripMargin) + ) + + /** Create the simpler JFunction and JProcedure sources for Scala 2.12+ */ + def create212: Seq[(String, String)] = { + val blocks = for(i <- 0 to 22) yield { + val ts = (1 to i).map(i => s"T$i").mkString(", ") + val tsComma = if(ts.isEmpty) "" else s"$ts," + val tsAngled = if(ts.isEmpty) "" else s"<$ts>" + val paramTs = (1 to i).map(i => s"T$i t$i").mkString(", ") + val argTs = (1 to i).map(i => s"t$i").mkString(", ") + + ( + ( s"JFunction$i", + s"""$copyright + |$packaging + | + |/** @deprecated Use scala.Function$i in Scala 2.12 */ + |@Deprecated + |@FunctionalInterface + |public interface JFunction$i<$tsComma R> extends scala.Function$i<$tsComma R>, java.io.Serializable {} + """.stripMargin), + ( s"JProcedure$i", + s"""$copyright + |$packaging + | + |import scala.runtime.BoxedUnit; + | + |@FunctionalInterface + |public interface JProcedure$i$tsAngled extends scala.Function$i<$tsComma BoxedUnit> { + | void applyVoid($paramTs); + | default BoxedUnit apply($paramTs) { applyVoid($argTs); return BoxedUnit.UNIT; } + |} + """.stripMargin), + s""" /** @deprecated Not needed anymore in Scala 2.12 */ + | @Deprecated + | public static <$tsComma R> scala.Function$i<$tsComma R> func(scala.Function$i<$tsComma R> f) { return f; } + | public static $tsAngled scala.Function$i<$tsComma BoxedUnit> proc(JProcedure$i$tsAngled p) { return p; } + """.stripMargin + ) + } + + def specialize(args: String): List[(Int, String, String)] = { + def combinations(l: List[String]): List[List[Char]] = + l.foldRight(List(Nil: List[Char])) { (s, z) => s.toList.flatMap(c => z.map(c :: _)) } + val split = args.split(",") + combinations(split.toList).map { s => + val types = s.map { + case 'B' => "Byte" + case 'S' => "Short" + case 'V' => "BoxedUnit" + case 'I' => "Integer" 
+ case 'J' => "Long" + case 'C' => "Character" + case 'F' => "Float" + case 'D' => "Double" + case 'Z' => "Boolean" + } + (split.length-1, (types.tail :+ types.head).mkString(", "), "$mc" + s.mkString + "$sp") + } + } + + val specialized = + List("V", "V,IJFD", "V,IJD,IJD").flatMap(specialize).map { case (i, a, sp) => + s" public static scala.Function$i<$a> procSpecialized(JFunction$i$sp f) { return (scala.Function$i<$a>)(Object)f; }" } ++ + List("BSIJCFDZ", "ZIFJD,IJFD", "ZIFJD,IJD,IJD").flatMap(specialize).map { case (i, a, sp) => + s" public static scala.Function$i<$a> funcSpecialized(JFunction$i$sp f) { return (scala.Function$i<$a>)(Object)f; }" } + + (blocks.map(_._1) ++ blocks.map(_._2)) :+ + ( "JFunction", + s"""$copyright + |$packaging + | + |import scala.runtime.BoxedUnit; + |import scala.runtime.java8.*; + | + |public final class JFunction { + | private JFunction() {} + |${specialized.mkString("\n")} + |${blocks.map(_._3).mkString("\n")} + |} + """.stripMargin) + } } diff --git a/project/WrapFnGen.scala b/project/WrapFnGen.scala new file mode 100644 index 0000000..eaf4482 --- /dev/null +++ b/project/WrapFnGen.scala @@ -0,0 +1,236 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +import scala.collection.mutable + +object WrapFnGen { + /** all 43 interfaces in java.util.function package */ + private lazy val allJfn = Seq( + "BiConsumer[T, U]: accept(T, U): Unit", + "BiFunction[T, U, R]: apply(T, U): R", + "BiPredicate[T, U]: test(T, U): Boolean", + "BinaryOperator[T]: apply(T, T): T", + "BooleanSupplier: getAsBoolean: Boolean", + "Consumer[T]: accept(T): Unit", + "DoubleBinaryOperator: applyAsDouble(Double, Double): Double", + "DoubleConsumer: accept(Double): Unit", + "DoubleFunction[R]: apply(Double): R", + "DoublePredicate: test(Double): Boolean", + "DoubleSupplier: getAsDouble: Double", + "DoubleToIntFunction: applyAsInt(Double): Int", + "DoubleToLongFunction: applyAsLong(Double): Long", + "DoubleUnaryOperator: applyAsDouble(Double): Double", + "Function[T, R]: apply(T): R", + "IntBinaryOperator: applyAsInt(Int, Int): Int", + "IntConsumer: accept(Int): Unit", + "IntFunction[R]: apply(Int): R", + "IntPredicate: test(Int): Boolean", + "IntSupplier: getAsInt: Int", + "IntToDoubleFunction: applyAsDouble(Int): Double", + "IntToLongFunction: applyAsLong(Int): Long", + "IntUnaryOperator: applyAsInt(Int): Int", + "LongBinaryOperator: applyAsLong(Long, Long): Long", + "LongConsumer: accept(Long): Unit", + "LongFunction[R]: apply(Long): R", + "LongPredicate: test(Long): Boolean", + "LongSupplier: getAsLong: Long", + "LongToDoubleFunction: applyAsDouble(Long): Double", + "LongToIntFunction: applyAsInt(Long): Int", + "LongUnaryOperator: applyAsLong(Long): Long", + "ObjDoubleConsumer[T]: accept(T, Double): Unit", + "ObjIntConsumer[T]: accept(T, Int): Unit", + "ObjLongConsumer[T]: accept(T, Long): Unit", + "Predicate[T]: test(T): Boolean", + "Supplier[T]: get: T", + "ToDoubleBiFunction[T, U]: applyAsDouble(T, U): Double", + "ToDoubleFunction[T]: applyAsDouble(T): Double", + "ToIntBiFunction[T, U]: applyAsInt(T, U): Int", + "ToIntFunction[T]: applyAsInt(T): Int", + "ToLongBiFunction[T, U]: applyAsLong(T, U): Long", + "ToLongFunction[T]: 
applyAsLong(T): Long", + "UnaryOperator[T]: apply(T): T", + ).map(Jfn.apply) + + /** @param sig - ex: "BiConsumer[T,U]: accept(T,U): Unit" + * or "DoubleToIntFunction: applyAsInt(Double): Int" */ + case class Jfn(sig: String) { + val Array( + iface, // interface name included type args, ex: BiConsumer[T,U] | DoubleToIntFunction + _method, // Temp val, ex: accept(T,U) | applyAsInt(Double) + rType // java function return type, ex: Unit | Int + ) = sig.split(':').map(_.trim) + + // interface name and java interface's type args, + // ex: ("BiConsumer", "[T,U]") | ("DoubleToIntFunction", "") + val (ifaceName, jtargs) = iface.span(_ != '[') + + // java method name and temp val, ex: "accept" -> "(T,U)" | "applyAsInt" -> "(Double)" + val (jmethod, _targs) = _method.span(_ != '(') + + // java method's type args, ex: Seq("T", "U") | Seq("Double") + val pTypes: Seq[String] = _targs.unwrapMe + + // arguments names, ex: Seq("x1", "x2") + val args: Seq[String] = pTypes.indices.map { i => "x" + (i+1) } + // ex: "(x1: T, x2: U)" | "(x1: Double)" + val argsDecl: String = args.zip(pTypes).map { + // Don't really need this case. 
Only here so the generated code is + // exactly == the code gen by the old method using scala-compiler + scala-reflect + case (p, t @ ("Double"|"Long"|"Int")) => s"$p: scala.$t" + case (p, t) => s"$p: $t" + }.mkString("(", ", ", ")") + // ex: "(x1, x2)" + val argsCall: String = args.mkString("(", ", ", ")") + + // arity of scala.Function + val arity: Int = args.length + + // ex: "java.util.function.BiConsumer[T,U]" | "java.util.function.DoubleToIntFunction" + val javaFn = s"java.util.function.$iface" + + // ex: "scala.Function2[T, U, Unit]" | "scala.Function1[Double, Int]" + val scalaFn = s"scala.Function$arity[${(pTypes :+ rType).mkString(", ")}]" + + def fromJavaCls: String = + s"""class FromJava$iface(jf: $javaFn) extends $scalaFn { + | def apply$argsDecl = jf.$jmethod$argsCall + |}""".stripMargin + + val richAsFnClsName = s"Rich${ifaceName}AsFunction$arity$jtargs" + def richAsFnCls: String = + s"""class $richAsFnClsName(private val underlying: $javaFn) extends AnyVal { + | @inline def asScala: $scalaFn = new FromJava$iface(underlying) + |}""".stripMargin + + def asJavaCls: String = + s"""class AsJava$iface(sf: $scalaFn) extends $javaFn { + | def $jmethod$argsDecl = sf.apply$argsCall + |}""".stripMargin + + val richFnAsClsName = s"RichFunction${arity}As$iface" + def richFnAsCls: String = + s"""class $richFnAsClsName(private val underlying: $scalaFn) extends AnyVal { + | @inline def asJava: $javaFn = new AsJava$iface(underlying) + |}""".stripMargin + + def converterImpls: String = + s"""$fromJavaCls\n + |$richAsFnCls\n + |$asJavaCls\n + |$richFnAsCls\n + |""".stripMargin + + /** @return "implicit def enrichAsJavaXX.." code */ + def enrichAsJavaDef: String = { + // This is especially tricky because functions are contravariant in their arguments + // Need to prevent e.g. 
Any => String from "downcasting" itself to Int => String; we want the more exact conversion + // Instead of foo[A](f: (Int, A) => Long): Fuu[A] = new Foo[A](f) + // we want foo[X, A](f: (X, A) => Long)(implicit evX: Int =:= X): Fuu[A] = new Foo[A](f.asInstanceOf[(Int, A) => Long]) + // Instead of bar[A](f: A => A): Brr[A] = new Foo[A](f) + // we want bar[A, B](f: A => B)(implicit evB: A =:= B): Brr[A] = new Foo[A](f.asInstanceOf[A => B]) + + val finalTypes = Set("Double", "Long", "Int", "Boolean", "Unit") + val An = "A(\\d+)".r + val numberedA = mutable.Set.empty[Int] + val evidences = mutable.ArrayBuffer.empty[(String, String)] // ex: "A0" -> "Double" + numberedA ++= pTypes.collect{ case An(digits) if (digits.length < 10) => digits.toInt } + val scalafnTnames = (pTypes :+ rType).zipWithIndex.map { + case (pt, i) if i < pTypes.length && finalTypes(pt) || !finalTypes(pt) && pTypes.take(i).contains(pt) => + val j = Iterator.from(i).dropWhile(numberedA).next() + val genericName = s"A$j" + numberedA += j + evidences += (genericName -> pt) + genericName + case (pt, _) => pt + } + val scalafnTdefs = scalafnTnames.dropRight(if (finalTypes(rType)) 1 else 0).wrapMe() + val scalaFnGeneric = s"scala.Function${scalafnTnames.length - 1}[${scalafnTnames.mkString(", ")}]" + val evs = evidences + .map { case (generic, specific) => s"ev$generic: =:=[$generic, $specific]" } + .wrapMe("(implicit ", ")") + val sf = if (evs.isEmpty) "sf" else s"sf.asInstanceOf[$scalaFn]" + s"@inline implicit def enrichAsJava$ifaceName$scalafnTdefs(sf: $scalaFnGeneric)$evs: $richFnAsClsName = new $richFnAsClsName($sf)" + } + + def asScalaFromDef = s"@inline def asScalaFrom$iface(jf: $javaFn): $scalaFn = new FromJava$iface(jf)" + + def asJavaDef = s"@inline def asJava$iface(sf: $scalaFn): $javaFn = new AsJava$iface(sf)" + + def enrichAsScalaDef = s"@inline implicit def enrichAsScalaFrom$iface(jf: $javaFn): $richAsFnClsName = new $richAsFnClsName(jf)" + } + + def converters: String = { + val groups = 
allJfn + .map(jfn => jfn.jtargs.unwrapMe.length -> jfn.enrichAsJavaDef) + .groupBy(_._1) + .toSeq + .sortBy(_._1) + .reverse + val maxPriority = groups.head._1 + groups.map { case (priority, seq) => + val parent = + if (priority == maxPriority) "" + else s" extends Priority${priority + 1}FunctionConverters" + val me = + if (priority == 0) "package object FunctionConverters" + else s"trait Priority${priority}FunctionConverters" + + val enrichAsJava = seq.map(_._2) + val (asXx, enrichAsScala) = + if (priority != 0) Nil -> Nil + else allJfn.map { jfn => jfn.asScalaFromDef + "\n\n" + jfn.asJavaDef } -> + allJfn.map(_.enrichAsScalaDef) + + s"""$me$parent { + | import functionConverterImpls._ + |${asXx.mkString("\n\n\n").indentMe} + |${enrichAsJava.mkString("\n\n").indentMe} + |${enrichAsScala.mkString("\n\n").indentMe} + |}""".stripMargin + }.mkString("\n\n\n") + } + + def code: String = + s""" + |/* + | * Copyright EPFL and Lightbend, Inc. + | * This file auto-generated by WrapFnGen.scala. Do not modify directly. 
+ | */ + | + |package scala.compat.java8 + | + |import language.implicitConversions + | + |package functionConverterImpls { + |${allJfn.map(_.converterImpls).mkString("\n").indentMe} + |} + |\n + |$converters + |""".stripMargin + + implicit class StringExt(private val s: String) extends AnyVal { + def indentMe: String = s.linesIterator.map(" " + _).mkString("\n") + def unwrapMe: Seq[String] = s match { + case "" => Nil + case _ => s + .substring(1, s.length - 1) // drop "(" and ")" or "[" and "]" + .split(',').map(_.trim).toSeq + } + } + + implicit class WrapMe(private val s: Seq[String]) extends AnyVal { + def wrapMe(start: String = "[", end: String = "]"): String = s match { + case Nil => "" + case _ => s.mkString(start, ", ", end) + } + } +} diff --git a/project/build.properties b/project/build.properties index 748703f..c02c575 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.7 +sbt.version=1.11.3 diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f604e6..983950c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,3 +1 @@ -addSbtPlugin("org.scala-lang.modules" % "scala-module-plugin" % "1.0.2") - -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.6") +addSbtPlugin("org.scala-lang.modules" % "sbt-scala-module" % "3.3.0") diff --git a/src/main/java-2.13+/scala/compat/java8/ScalaStreamSupport.java b/src/main/java-2.13+/scala/compat/java8/ScalaStreamSupport.java new file mode 100644 index 0000000..2071d18 --- /dev/null +++ b/src/main/java-2.13+/scala/compat/java8/ScalaStreamSupport.java @@ -0,0 +1,384 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8; + +import java.util.stream.*; + +import scala.collection.*; +import scala.jdk.javaapi.StreamConverters; + +/** + * This class contains static utility methods for creating Java Streams from Scala Collections, similar + * to the methods in {@code java.util.stream.StreamSupport} for other Java types. It is intended for + * use from Java code. In Scala code, you can use the extension methods provided by + * {@code scala.compat.java8.StreamConverters} instead. + * + * Streams created from immutable Scala collections are also immutable. Mutable collections should + * not be modified concurrently. There are no guarantees for success or failure modes of existing + * streams in case of concurrent modifications. + */ +public class ScalaStreamSupport { + ///////////////////// + // Generic Streams // + ///////////////////// + + /** + * Generates a Stream that traverses a Scala collection. + *

+ * Parallel processing is only efficient for collections that have a Stepper implementation + * which supports efficient splitting. For collections where this is the case, the stepper + * method has a return type marked with EfficientSplit. + * + * @param coll The IterableOnce to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(IterableOnce coll) { + return StreamConverters.asJavaSeqStream(coll); + } + + /** + * Generates a Stream that traverses the keys of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a keyStepper implementation + * which supports efficient splitting. For collections where this is the case, the keyStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamKeys(Map coll) { + return StreamSupport.stream(coll.keyStepper(StepperShape.anyStepperShape()).spliterator(), false); + } + + /** + * Generates a Stream that traverses the values of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a valueStepper implementation + * which supports efficient splitting. For collections where this is the case, the valueStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamValues(Map coll) { + return StreamSupport.stream(coll.>valueStepper(StepperShape.anyStepperShape()).spliterator(), false); + } + + /** + * Generates a Stream that traverses the key-value pairs of a scala.collection.Map. + *

+ * Parallel processing is only efficient for collections that have a Stepper implementation + * which supports efficient splitting. For collections where this is the case, the stepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream< scala.Tuple2 > stream(Map coll) { + return StreamConverters.asJavaSeqStream(coll); + } + + /** + * Generates a Stream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamAccumulated(IterableOnce coll) { + return StreamConverters.asJavaSeqStream(scala.jdk.AnyAccumulator.from(coll)); + } + + /** + * Generates a Stream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamAccumulatedKeys(Map coll) { + return StreamConverters.asJavaSeqStream(scala.jdk.AnyAccumulator.from(coll.keysIterator())); + } + + /** + * Generates a Stream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamAccumulatedValues(Map coll) { + return StreamConverters.asJavaSeqStream(scala.jdk.AnyAccumulator.from(coll.valuesIterator())); + } + + //////////////////// + // Double Streams // + //////////////////// + + /** + * Generates a DoubleStream that traverses a Scala collection. + *

+ * Parallel processing is only efficient for collections that have a Stepper implementation + * which supports efficient splitting. For collections where this is the case, the stepper + * method has a return type marked with EfficientSplit. + * + * @param coll The IterableOnce to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(IterableOnce coll) { + return StreamConverters.asJavaSeqDoubleStream(coll); + } + + /** + * Generates a DoubleStream that traverses the keys of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a keyStepper implementation + * which supports efficient splitting. For collections where this is the case, the keyStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamKeys(Map coll) { + return StreamSupport.doubleStream(coll.keyStepper((StepperShape)(Object)StepperShape.doubleStepperShape()).spliterator(), false); + } + + /** + * Generates a DoubleStream that traverses the values of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a valueStepper implementation + * which supports efficient splitting. For collections where this is the case, the valueStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamValues(Map coll) { + return StreamSupport.doubleStream(coll.valueStepper((StepperShape)(Object)StepperShape.doubleStepperShape()).spliterator(), false); + } + + /** + * Generates a DoubleStream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamAccumulated(IterableOnce coll) { + return StreamConverters.asJavaSeqDoubleStream((IterableOnce)(Object)scala.jdk.DoubleAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll)); + } + + /** + * Generates a DoubleStream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamAccumulatedKeys(Map coll) { + return StreamConverters.asJavaSeqDoubleStream((IterableOnce)(Object)scala.jdk.DoubleAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll.keysIterator())); + } + + /** + * Generates a DoubleStream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamAccumulatedValues(Map coll) { + return StreamConverters.asJavaSeqDoubleStream((IterableOnce)(Object)scala.jdk.DoubleAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll.valuesIterator())); + } + + ///////////////// + // Int Streams // + ///////////////// + + /** + * Generates a IntStream that traverses a Scala collection. + *

+ * Parallel processing is only efficient for collections that have a Stepper implementation + * which supports efficient splitting. For collections where this is the case, the stepper + * method has a return type marked with EfficientSplit. + * + * @param coll The IterableOnce to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(IterableOnce coll) { + return StreamConverters.asJavaSeqIntStream(coll); + } + + /** + * Generates a IntStream that traverses the keys of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a keyStepper implementation + * which supports efficient splitting. For collections where this is the case, the keyStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamKeys(Map coll) { + return StreamSupport.intStream(coll.keyStepper((StepperShape)(Object)StepperShape.intStepperShape()).spliterator(), false); + } + + /** + * Generates a IntStream that traverses the values of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a valueStepper implementation + * which supports efficient splitting. For collections where this is the case, the valueStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamValues(Map coll) { + return StreamSupport.intStream(coll.valueStepper((StepperShape)(Object)StepperShape.intStepperShape()).spliterator(), false); + } + + /** + * Generates a IntStream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamAccumulated(IterableOnce coll) { + return StreamConverters.asJavaSeqIntStream((IterableOnce)(Object)scala.jdk.IntAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll)); + } + + /** + * Generates a IntStream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamAccumulatedKeys(Map coll) { + return StreamConverters.asJavaSeqIntStream((IterableOnce)(Object)scala.jdk.IntAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll.keysIterator())); + } + + /** + * Generates a IntStream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamAccumulatedValues(Map coll) { + return StreamConverters.asJavaSeqIntStream((IterableOnce)(Object)scala.jdk.IntAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll.valuesIterator())); + } + + ////////////////// + // Long Streams // + ////////////////// + + /** + * Generates a LongStream that traverses a Scala collection. + *

+ * Parallel processing is only efficient for collections that have a Stepper implementation + * which supports efficient splitting. For collections where this is the case, the stepper + * method has a return type marked with EfficientSplit. + * + * @param coll The IterableOnce to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(IterableOnce coll) { + return StreamConverters.asJavaSeqLongStream(coll); + } + + /** + * Generates a LongStream that traverses the keys of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a keyStepper implementation + * which supports efficient splitting. For collections where this is the case, the keyStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamKeys(Map coll) { + return StreamSupport.longStream(coll.keyStepper((StepperShape)(Object)StepperShape.doubleStepperShape()).spliterator(), false); + } + + /** + * Generates a LongStream that traverses the values of a scala.collection.Map. + *

+ * Parallel processing is only efficient for Maps that have a valueStepper implementation + * which supports efficient splitting. For collections where this is the case, the valueStepper + * method has a return type marked with EfficientSplit. + * + * @param coll The Map to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamValues(Map coll) { + return StreamSupport.longStream(coll.valueStepper((StepperShape)(Object)StepperShape.doubleStepperShape()).spliterator(), false); + } + + /** + * Generates a LongStream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamAccumulated(IterableOnce coll) { + return StreamConverters.asJavaSeqLongStream((IterableOnce)(Object)scala.jdk.LongAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll)); + } + + /** + * Generates a LongStream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamAccumulatedKeys(Map coll) { + return StreamConverters.asJavaSeqLongStream((IterableOnce)(Object)scala.jdk.LongAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll.keysIterator())); + } + + /** + * Generates a LongStream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamAccumulatedValues(Map coll) { + return StreamConverters.asJavaSeqLongStream((IterableOnce)(Object)scala.jdk.LongAccumulator$.MODULE$.fromSpecific((IterableOnce)(Object)coll.valuesIterator())); + } +} diff --git a/src/main/java-2.13-/scala/compat/java8/ScalaStreamSupport.java b/src/main/java-2.13-/scala/compat/java8/ScalaStreamSupport.java new file mode 100644 index 0000000..0aa976b --- /dev/null +++ b/src/main/java-2.13-/scala/compat/java8/ScalaStreamSupport.java @@ -0,0 +1,927 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8; + +import scala.compat.java8.converterImpl.*; +import scala.compat.java8.collectionImpl.*; +import java.util.stream.*; +import scala.compat.java8.runtime.CollectionInternals; + +/** + * This class contains static utility methods for creating Java Streams from Scala Collections, similar + * to the methods in {@code java.util.stream.StreamSupport} for other Java types. It is intended for + * use from Java code. In Scala code, you can use the extension methods provided by + * {@code scala.compat.java8.StreamConverters} instead. + * + * Streams created from immutable Scala collections are also immutable. Mutable collections should + * not be modified concurrently. There are no guarantees for success or failure modes of existing + * streams in case of concurrent modifications. 
+ */ +public class ScalaStreamSupport { + ///////////////////// + // Generic Streams // + ///////////////////// + + /** + * Generates a Stream that traverses an IndexedSeq. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The IndexedSeq to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(scala.collection.IndexedSeq coll) { + return StreamSupport.stream(new StepsAnyIndexedSeq(coll, 0, coll.length()), false); + } + + /** + * Generates a Stream that traverses the keys of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamKeys(scala.collection.immutable.HashMap coll) { + return StreamSupport.stream(new StepsAnyImmHashMapKey(coll, 0, coll.size()), false); + } + + /** + * Generates a Stream that traverses the values of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamValues(scala.collection.immutable.HashMap coll) { + return StreamSupport.stream(new StepsAnyImmHashMapValue(coll, 0, coll.size()), false); + } + + /** + * Generates a Stream that traverses the key-value pairs of a scala.collection.immutable.HashMap. + * The key-value pairs are presented as instances of scala.Tuple2. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream< scala.Tuple2 > stream(scala.collection.immutable.HashMap coll) { + return StreamSupport.stream(new StepsAnyImmHashMap(coll, 0, coll.size()), false); + } + + /** + * Generates a Stream that traverses a scala.collection.immutable.HashSet. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashSet to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(scala.collection.immutable.HashSet coll) { + return StreamSupport.stream(new StepsAnyImmHashSet(coll.iterator(), coll.size()), false); + } + + /** + * Generates a Stream that traverses the keys of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamKeys(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.stream(new StepsAnyHashTableKey(tbl, 0, tbl.length), false); + } + + /** + * Generates a Stream that traverses the values of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamValues(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.stream(new StepsAnyDefaultHashTableValue(tbl, 0, tbl.length), false); + } + + /** + * Generates a Stream that traverses the key-value pairs of a scala.collection.mutable.HashMap. + * The key-value pairs are presented as instances of scala.Tuple2. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream< scala.Tuple2 > stream(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry< K, scala.collection.mutable.DefaultEntry >[] tbl = + CollectionInternals.getTable(coll); + return StreamSupport.stream(new StepsAnyDefaultHashTable(tbl, 0, tbl.length), false); + } + + /** + * Generates a Stream that traverses a scala.collection.mutable.HashSet. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashSet to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(scala.collection.mutable.HashSet coll) { + Object[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.stream(new StepsAnyFlatHashTable(tbl, 0, tbl.length), false); + } + + /** + * Generates a Stream that traverses a scala.collection.immutable.Vector. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The Vector to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(scala.collection.immutable.Vector coll) { + return StreamSupport.stream(new StepsAnyVector(coll, 0, coll.length()), false); + } + + /** + * Generates a Stream that traverses the keys of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the streamAccumulatedKeys method instead, but + * note that this creates a new collection containing the Map's keys. + * + * @param coll The Map to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamKeys(scala.collection.Map coll) { + return StreamSupport.stream(new StepsAnyIterator(coll.keysIterator()), false); + } + + /** + * Generates a Stream that traverses the values of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the streamAccumulatedValues method instead, but + * note that this creates a new collection containing the Map's values. + * + * @param coll The Map to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamValues(scala.collection.Map coll) { + return StreamSupport.stream(new StepsAnyIterator(coll.valuesIterator()), false); + } + + /** + * Generates a Stream that traverses the key-value pairs of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the streamAccumulated method instead, but + * note that this creates a new collection containing the Map's key-value pairs. + * + * @param coll The Map to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream< scala.Tuple2 > stream(scala.collection.Map coll) { + return StreamSupport.stream(new StepsAnyIterator< scala.Tuple2 >(coll.iterator()), false); + } + + /** + * Generates a Stream that traverses a scala.collection.Iterator. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the streamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterator. + * + * @param coll The scala.collection.Iterator to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(scala.collection.Iterator coll) { + return StreamSupport.stream(new StepsAnyIterator(coll), false); + } + + /** + * Generates a Stream that traverses a scala.collection.Iterable. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the streamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterable + * + * @param coll The scala.collection.Iterable to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream stream(scala.collection.Iterable coll) { + return StreamSupport.stream(new StepsAnyIterator(coll.iterator()), false); + } + + /** + * Generates a Stream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamAccumulated(scala.collection.TraversableOnce coll) { + scala.compat.java8.collectionImpl.Accumulator acc = scala.compat.java8.collectionImpl.Accumulator.from(coll); + return StreamSupport.stream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamAccumulatedKeys(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.Accumulator acc = scala.compat.java8.collectionImpl.Accumulator.from(coll.keysIterator()); + return StreamSupport.stream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static Stream streamAccumulatedValues(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.Accumulator acc = scala.compat.java8.collectionImpl.Accumulator.from(coll.valuesIterator()); + return StreamSupport.stream(acc.spliterator(), false); + } + + //////////////////// + // Double Streams // + //////////////////// + + /** + * Generates a DoubleStream that traverses an IndexedSeq of Doubles. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The IndexedSeq to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(scala.collection.IndexedSeq coll) { + return StreamSupport.doubleStream(new StepsDoubleIndexedSeq(coll, 0, coll.length()), false); + } + + /** + * Generates a DoubleStream that traverses double-valued keys of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamKeys(scala.collection.immutable.HashMap coll) { + return StreamSupport.doubleStream(new StepsDoubleImmHashMapKey(coll, 0, coll.size()), false); + } + + /** + * Generates a DoubleStream that traverses double-valued values of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamValues(scala.collection.immutable.HashMap coll) { + return StreamSupport.doubleStream(new StepsDoubleImmHashMapValue(coll, 0, coll.size()), false); + } + + /** + * Generates a DoubleStream that traverses a scala.collection.immutable.HashSet of Doubles. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashSet to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(scala.collection.immutable.HashSet coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.iterator(); + return StreamSupport.doubleStream(new StepsDoubleImmHashSet(iter, coll.size()), false); + } + + /** + * Generates a DoubleStream that traverses double-valued keys of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamKeys(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.doubleStream(new StepsDoubleHashTableKey(tbl, 0, tbl.length), false); + } + + /** + * Generates a DoubleStream that traverses double-valued values of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamValues(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.doubleStream(new StepsDoubleDefaultHashTableValue(tbl, 0, tbl.length), false); + } + + /** + * Generates a DoubleStream that traverses a scala.collection.mutable.HashSet of Doubles. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashSet to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(scala.collection.mutable.HashSet coll) { + Object[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.doubleStream(new StepsDoubleFlatHashTable(tbl, 0, tbl.length), false); + } + + /** + * Generates a DoubleStream that traverses a scala.collection.immutable.Vector of Doubles. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The Vector to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(scala.collection.immutable.Vector coll) { + scala.collection.immutable.Vector erased = (scala.collection.immutable.Vector)coll; + return StreamSupport.doubleStream(new StepsDoubleVector(erased, 0, coll.length()), false); + } + + /** + * Generates a DoubleStream that traverses the double-valued keys of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the doubleStreamAccumulatedKeys method instead, but + * note that this creates a new collection containing the Map's keys. + * + * @param coll The Map to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamKeys(scala.collection.Map coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.keysIterator(); + return StreamSupport.doubleStream(new StepsDoubleIterator(iter), false); + } + + /** + * Generates a DoubleStream that traverses the double-valued values of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the doubleStreamAccumulatedValues method instead, but + * note that this creates a new collection containing the Map's values. + * + * @param coll The Map to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamValues(scala.collection.Map coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.valuesIterator(); + return StreamSupport.doubleStream(new StepsDoubleIterator(iter), false); + } + + /** + * Generates a DoubleStream that traverses a double-valued scala.collection.Iterator. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the doubleStreamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterator. + * + * @param coll The scala.collection.Iterator to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(scala.collection.Iterator coll) { + return StreamSupport.doubleStream(new StepsDoubleIterator((scala.collection.Iterator)coll), false); + } + + /** + * Generates a DoubleStream that traverses a double-valued scala.collection.Iterable. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the doubleStreamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterable. + * + * @param coll The scala.collection.Iterable to traverse + * @return A DoubleStream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStream(scala.collection.Iterable coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.iterator(); + return StreamSupport.doubleStream(new StepsDoubleIterator(iter), false); + } + + /** + * Generates a Stream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamAccumulated(scala.collection.TraversableOnce coll) { + scala.compat.java8.collectionImpl.DoubleAccumulator acc = + scala.compat.java8.collectionImpl.DoubleAccumulator.from((scala.collection.TraversableOnce)coll); + return StreamSupport.doubleStream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamAccumulatedKeys(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.DoubleAccumulator acc = + scala.compat.java8.collectionImpl.DoubleAccumulator.from((scala.collection.Iterator)coll.keysIterator()); + return StreamSupport.doubleStream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static DoubleStream doubleStreamAccumulatedValues(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.DoubleAccumulator acc = + scala.compat.java8.collectionImpl.DoubleAccumulator.from((scala.collection.Iterator)coll.valuesIterator()); + return StreamSupport.doubleStream(acc.spliterator(), false); + } + + ///////////////// + // Int Streams // + ///////////////// + + /** + * Generates a IntStream that traverses a BitSet. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The BitSet to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.BitSet coll) { + // Let the value class figure out the casting! + scala.compat.java8.converterImpl.RichBitSetCanStep rbscs = + new scala.compat.java8.converterImpl.RichBitSetCanStep(coll); + return StreamSupport.intStream(rbscs.stepper(StepperShape$.MODULE$.intStepperShape()), false); + } + + /** + * Generates a IntStream that traverses a Range. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The Range to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.immutable.Range coll) { + return StreamSupport.intStream(new scala.compat.java8.converterImpl.StepsIntRange(coll, 0, coll.length()), false); + } + + /** + * Generates a IntStream that traverses an IndexedSeq of Ints. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The IndexedSeq to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.IndexedSeq coll) { + return StreamSupport.intStream(new StepsIntIndexedSeq(coll, 0, coll.length()), false); + } + + /** + * Generates a IntStream that traverses int-valued keys of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamKeys(scala.collection.immutable.HashMap coll) { + return StreamSupport.intStream(new StepsIntImmHashMapKey(coll, 0, coll.size()), false); + } + + /** + * Generates a IntStream that traverses int-valued values of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamValues(scala.collection.immutable.HashMap coll) { + return StreamSupport.intStream(new StepsIntImmHashMapValue(coll, 0, coll.size()), false); + } + + /** + * Generates a IntStream that traverses a scala.collection.immutable.HashSet of Ints. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashSet to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.immutable.HashSet coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.iterator(); + return StreamSupport.intStream(new StepsIntImmHashSet(iter, coll.size()), false); + } + + /** + * Generates a IntStream that traverses int-valued keys of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamKeys(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.intStream(new StepsIntHashTableKey(tbl, 0, tbl.length), false); + } + + /** + * Generates a IntStream that traverses int-valued values of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamValues(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.intStream(new StepsIntDefaultHashTableValue(tbl, 0, tbl.length), false); + } + + /** + * Generates a IntStream that traverses a scala.collection.mutable.HashSet of Ints. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashSet to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.mutable.HashSet coll) { + Object[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.intStream(new StepsIntFlatHashTable(tbl, 0, tbl.length), false); + } + + /** + * Generates a IntStream that traverses a scala.collection.immutable.Vector of Ints. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The Vector to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.immutable.Vector coll) { + scala.collection.immutable.Vector erased = (scala.collection.immutable.Vector)coll; + return StreamSupport.intStream(new StepsIntVector(erased, 0, coll.length()), false); + } + + /** + * Generates a IntStream that traverses the int-valued keys of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the intStreamAccumulatedKeys method instead, but + * note that this creates a new collection containing the Map's keys. + * + * @param coll The Map to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamKeys(scala.collection.Map coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.keysIterator(); + return StreamSupport.intStream(new StepsIntIterator(iter), false); + } + + /** + * Generates a IntStream that traverses the int-valued values of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the intStreamAccumulatedValues method instead, but + * note that this creates a new collection containing the Map's values. + * + * @param coll The Map to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamValues(scala.collection.Map coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.valuesIterator(); + return StreamSupport.intStream(new StepsIntIterator(iter), false); + } + + /** + * Generates a IntStream that traverses a int-valued scala.collection.Iterator. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the intStreamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterator. + * + * @param coll The scala.collection.Iterator to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.Iterator coll) { + return StreamSupport.intStream(new StepsIntIterator((scala.collection.Iterator)coll), false); + } + + /** + * Generates a IntStream that traverses a int-valued scala.collection.Iterable. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the intStreamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterable. + * + * @param coll The scala.collection.Iterable to traverse + * @return A IntStream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStream(scala.collection.Iterable coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.iterator(); + return StreamSupport.intStream(new StepsIntIterator(iter), false); + } + + /** + * Generates a Stream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamAccumulated(scala.collection.TraversableOnce coll) { + scala.compat.java8.collectionImpl.IntAccumulator acc = + scala.compat.java8.collectionImpl.IntAccumulator.from((scala.collection.TraversableOnce)coll); + return StreamSupport.intStream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamAccumulatedKeys(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.IntAccumulator acc = + scala.compat.java8.collectionImpl.IntAccumulator.from((scala.collection.Iterator)coll.keysIterator()); + return StreamSupport.intStream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static IntStream intStreamAccumulatedValues(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.IntAccumulator acc = + scala.compat.java8.collectionImpl.IntAccumulator.from((scala.collection.Iterator)coll.valuesIterator()); + return StreamSupport.intStream(acc.spliterator(), false); + } + + ////////////////// + // Long Streams // + ////////////////// + + /** + * Generates a LongStream that traverses an IndexedSeq of Longs. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The IndexedSeq to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(scala.collection.IndexedSeq coll) { + return StreamSupport.longStream(new StepsLongIndexedSeq(coll, 0, coll.length()), false); + } + + /** + * Generates a LongStream that traverses long-valued keys of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamKeys(scala.collection.immutable.HashMap coll) { + return StreamSupport.longStream(new StepsLongImmHashMapKey(coll, 0, coll.size()), false); + } + + /** + * Generates a LongStream that traverses long-valued values of a scala.collection.immutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashMap to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamValues(scala.collection.immutable.HashMap coll) { + return StreamSupport.longStream(new StepsLongImmHashMapValue(coll, 0, coll.size()), false); + } + + /** + * Generates a LongStream that traverses a scala.collection.immutable.HashSet of Longs. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The immutable.HashSet to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(scala.collection.immutable.HashSet coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.iterator(); + return StreamSupport.longStream(new StepsLongImmHashSet(iter, coll.size()), false); + } + + /** + * Generates a LongStream that traverses long-valued keys of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamKeys(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.longStream(new StepsLongHashTableKey(tbl, 0, tbl.length), false); + } + + /** + * Generates a LongStream that traverses long-valued values of a scala.collection.mutable.HashMap. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashMap to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamValues(scala.collection.mutable.HashMap coll) { + scala.collection.mutable.HashEntry[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.longStream(new StepsLongDefaultHashTableValue(tbl, 0, tbl.length), false); + } + + /** + * Generates a LongStream that traverses a scala.collection.mutable.HashSet of Longs. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The mutable.HashSet to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(scala.collection.mutable.HashSet coll) { + Object[] tbl = CollectionInternals.getTable(coll); + return StreamSupport.longStream(new StepsLongFlatHashTable(tbl, 0, tbl.length), false); + } + + /** + * Generates a LongStream that traverses a scala.collection.immutable.Vector of Longs. + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The Vector to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(scala.collection.immutable.Vector coll) { + scala.collection.immutable.Vector erased = (scala.collection.immutable.Vector)coll; + return StreamSupport.longStream(new StepsLongVector(erased, 0, coll.length()), false); + } + + /** + * Generates a LongStream that traverses the long-valued keys of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the longStreamAccumulatedKeys method instead, but + * note that this creates a new collection containing the Map's keys. + * + * @param coll The Map to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamKeys(scala.collection.Map coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.keysIterator(); + return StreamSupport.longStream(new StepsLongIterator(iter), false); + } + + /** + * Generates a LongStream that traverses the long-valued values of a scala.collection.Map. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the longStreamAccumulatedValues method instead, but + * note that this creates a new collection containing the Map's values. + * + * @param coll The Map to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamValues(scala.collection.Map coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.valuesIterator(); + return StreamSupport.longStream(new StepsLongIterator(iter), false); + } + + /** + * Generates a LongStream that traverses a long-valued scala.collection.Iterator. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the longStreamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterator. + * + * @param coll The scala.collection.Iterator to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(scala.collection.Iterator coll) { + return StreamSupport.longStream(new StepsLongIterator((scala.collection.Iterator)coll), false); + } + + /** + * Generates a LongStream that traverses a long-valued scala.collection.Iterable. + *

+ * Only sequential operations will be efficient. + * For efficient parallel operation, use the longStreamAccumulated method instead, + * but note that this creates a copy of the contents of the Iterable. + * + * @param coll The scala.collection.Iterable to traverse + * @return A LongStream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStream(scala.collection.Iterable coll) { + scala.collection.Iterator iter = (scala.collection.Iterator)coll.iterator(); + return StreamSupport.longStream(new StepsLongIterator(iter), false); + } + + /** + * Generates a Stream that traverses any Scala collection by accumulating its entries + * into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The collection to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamAccumulated(scala.collection.TraversableOnce coll) { + scala.compat.java8.collectionImpl.LongAccumulator acc = + scala.compat.java8.collectionImpl.LongAccumulator.from((scala.collection.TraversableOnce)coll); + return StreamSupport.longStream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the keys of any Scala map by + * accumulating those keys into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing keys to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamAccumulatedKeys(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.LongAccumulator acc = + scala.compat.java8.collectionImpl.LongAccumulator.from((scala.collection.Iterator)coll.keysIterator()); + return StreamSupport.longStream(acc.spliterator(), false); + } + + /** + * Generates a Stream that traverses the values of any Scala map by + * accumulating those values into a buffer class (Accumulator). + *

+ * Both sequential and parallel operations will be efficient. + * + * @param coll The map containing values to traverse + * @return A Stream view of the collection which, by default, executes sequentially. + */ + public static LongStream longStreamAccumulatedValues(scala.collection.Map coll) { + scala.compat.java8.collectionImpl.LongAccumulator acc = + scala.compat.java8.collectionImpl.LongAccumulator.from((scala.collection.Iterator)coll.valuesIterator()); + return StreamSupport.longStream(acc.spliterator(), false); + } +} diff --git a/src/main/java-2.13-/scala/compat/java8/runtime/CollectionInternals.java b/src/main/java-2.13-/scala/compat/java8/runtime/CollectionInternals.java new file mode 100644 index 0000000..330483c --- /dev/null +++ b/src/main/java-2.13-/scala/compat/java8/runtime/CollectionInternals.java @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.runtime; + +// No imports! All type names are fully qualified to avoid confusion! 
+ +public class CollectionInternals { + public static Object[] getTable(scala.collection.mutable.FlatHashTable fht) { return fht.hashTableContents().table(); } + public static > scala.collection.mutable.HashEntry[] getTable(scala.collection.mutable.HashTable ht) { return ht.hashTableContents().table(); } + public static boolean getDirt(scala.collection.immutable.Vector v) { return v.dirty(); } + public static Object[] getDisplay0(scala.collection.immutable.Vector v) { return v.display0(); } + public static Object[] getDisplay0(scala.collection.immutable.VectorIterator p) { return p.display0(); } + public static Object[] getDisplay1(scala.collection.immutable.Vector v) { return v.display1(); } + public static Object[] getDisplay1(scala.collection.immutable.VectorIterator p) { return p.display1(); } + public static Object[] getDisplay2(scala.collection.immutable.Vector v) { return v.display2(); } + public static Object[] getDisplay2(scala.collection.immutable.VectorIterator p) { return p.display2(); } + public static Object[] getDisplay3(scala.collection.immutable.Vector v) { return v.display3(); } + public static Object[] getDisplay3(scala.collection.immutable.VectorIterator p) { return p.display3(); } + public static Object[] getDisplay4(scala.collection.immutable.Vector v) { return v.display4(); } + public static Object[] getDisplay4(scala.collection.immutable.VectorIterator p) { return p.display4(); } + public static Object[] getDisplay5(scala.collection.immutable.Vector v) { return v.display5(); } + public static Object[] getDisplay5(scala.collection.immutable.VectorIterator p) { return p.display5(); } + public static scala.Tuple2< scala.Tuple2< scala.collection.Iterator, Object >, scala.collection.Iterator > trieIteratorSplit(scala.collection.Iterator it) { + if (it instanceof scala.collection.immutable.TrieIterator) { + scala.collection.immutable.TrieIterator trie = (scala.collection.immutable.TrieIterator)it; + return trie.split(); + } + return null; + } + 
public static long[] getBitSetInternals(scala.collection.mutable.BitSet bitSet) { return bitSet.elems(); } +} + diff --git a/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveDoubleWrapper.java b/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveDoubleWrapper.java index 7c27293..0007ab6 100644 --- a/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveDoubleWrapper.java +++ b/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveDoubleWrapper.java @@ -1,6 +1,15 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.compat.java8.wrappers; public class IteratorPrimitiveDoubleWrapper implements java.util.PrimitiveIterator.OfDouble { diff --git a/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveIntWrapper.java b/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveIntWrapper.java index 330a41c..ac9d72b 100644 --- a/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveIntWrapper.java +++ b/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveIntWrapper.java @@ -1,6 +1,15 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.compat.java8.wrappers; public class IteratorPrimitiveIntWrapper implements java.util.PrimitiveIterator.OfInt { diff --git a/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveLongWrapper.java b/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveLongWrapper.java index 9adff5f..e26e962 100644 --- a/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveLongWrapper.java +++ b/src/main/java/scala/compat/java8/wrappers/IteratorPrimitiveLongWrapper.java @@ -1,6 +1,15 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.compat.java8.wrappers; public class IteratorPrimitiveLongWrapper implements java.util.PrimitiveIterator.OfLong { diff --git a/src/main/scala-2.11/scala/compat/java8/runtime/LambdaDeserializer.scala b/src/main/scala-2.11/scala/compat/java8/runtime/LambdaDeserializer.scala new file mode 100644 index 0000000..e3ad73a --- /dev/null +++ b/src/main/scala-2.11/scala/compat/java8/runtime/LambdaDeserializer.scala @@ -0,0 +1,145 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.runtime + +import java.lang.invoke._ + +/** + * This class is only intended to be called by synthetic `$deserializeLambda$` method that the + * Scala 2.11 compiler will add to classes hosting lambdas. In Scala 2.12+, it's part of the + * standard library. + * + * It is not intended to be consumed directly. 
+ */ +object LambdaDeserializer { + /** + * Deserialize a lambda by calling `LambdaMetafactory.altMetafactory` to spin up a lambda class + * and instantiating this class with the captured arguments. + * + * A cache may be provided to ensure that subsequent deserialization of the same lambda expression + * is cheap, it amounts to a reflective call to the constructor of the previously created class. + * However, deserialization of the same lambda expression is not guaranteed to use the same class, + * concurrent deserialization of the same lambda expression may spin up more than one class. + * + * Assumptions: + * - No additional marker interfaces are required beyond `{java.io,scala.}Serializable`. These are + * not stored in `SerializedLambda`, so we can't reconstitute them. + * - No additional bridge methods are passed to `altMetafactory`. Again, these are not stored. + * + * @param lookup The factory for method handles. Must have access to the implementation method, the + * functional interface class, and `java.io.Serializable` or `scala.Serializable` as + * required. + * @param cache A cache used to avoid spinning up a class for each deserialization of a given lambda. May be `null` + * @param serialized The lambda to deserialize. Note that this is typically created by the `readResolve` + * member of the anonymous class created by `LambdaMetaFactory`. 
+ * @return An instance of the functional interface + */ + def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { + def slashDot(name: String) = name.replaceAll("/", ".") + val loader = lookup.lookupClass().getClassLoader + val implClass = loader.loadClass(slashDot(serialized.getImplClass)) + + def makeCallSite: CallSite = { + import serialized._ + def parseDescriptor(s: String) = + MethodType.fromMethodDescriptorString(s, loader) + + val funcInterfaceSignature = parseDescriptor(getFunctionalInterfaceMethodSignature) + val instantiated = parseDescriptor(getInstantiatedMethodType) + val functionalInterfaceClass = loader.loadClass(slashDot(getFunctionalInterfaceClass)) + + val implMethodSig = parseDescriptor(getImplMethodSignature) + // Construct the invoked type from the impl method type. This is the type of a factory + // that will be generated by the meta-factory. It is a method type, with param types + // coming form the types of the captures, and return type being the functional interface. + val invokedType: MethodType = { + // 1. Add receiver for non-static impl methods + val withReceiver = getImplMethodKind match { + case MethodHandleInfo.REF_invokeStatic | MethodHandleInfo.REF_newInvokeSpecial => + implMethodSig + case _ => + implMethodSig.insertParameterTypes(0, implClass) + } + // 2. Remove lambda parameters, leaving only captures. Note: the receiver may be a lambda parameter, + // such as in `Function s = Object::toString` + val lambdaArity = funcInterfaceSignature.parameterCount() + val from = withReceiver.parameterCount() - lambdaArity + val to = withReceiver.parameterCount() + + // 3. Drop the lambda return type and replace with the functional interface. 
+ withReceiver.dropParameterTypes(from, to).changeReturnType(functionalInterfaceClass) + } + + // Lookup the implementation method + val implMethod: MethodHandle = try { + findMember(lookup, getImplMethodKind, implClass, getImplMethodName, implMethodSig) + } catch { + case e: ReflectiveOperationException => throw new IllegalArgumentException("Illegal lambda deserialization", e) + } + + val flags: Int = LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS + val isScalaFunction = functionalInterfaceClass.getName.startsWith("scala.Function") + val markerInterface: Class[_] = loader.loadClass(if (isScalaFunction) ScalaSerializable else JavaIOSerializable) + + LambdaMetafactory.altMetafactory( + lookup, getFunctionalInterfaceMethodName, invokedType, + + /* samMethodType = */ funcInterfaceSignature, + /* implMethod = */ implMethod, + /* instantiatedMethodType = */ instantiated, + /* flags = */ flags.asInstanceOf[AnyRef], + /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef], + /* markerInterfaces[0] = */ markerInterface, + /* bridgeCount = */ 0.asInstanceOf[AnyRef] + ) + } + + val key = serialized.getImplMethodName + " : " + serialized.getImplMethodSignature + val factory: MethodHandle = if (cache == null) { + makeCallSite.getTarget + } else cache.get(key) match { + case null => + val callSite = makeCallSite + val temp = callSite.getTarget + cache.put(key, temp) + temp + case target => target + } + + val captures = Array.tabulate(serialized.getCapturedArgCount)(n => serialized.getCapturedArg(n)) + factory.invokeWithArguments(captures: _*) + } + + private val ScalaSerializable = "scala.Serializable" + + private val JavaIOSerializable = { + // We could actually omit this marker interface as LambdaMetaFactory will add it if + // the FLAG_SERIALIZABLE is set and of the provided markers extend it. But the code + // is cleaner if we uniformly add a single marker, so I'm leaving it in place. 
+ "java.io.Serializable" + } + + private def findMember(lookup: MethodHandles.Lookup, kind: Int, owner: Class[_], + name: String, signature: MethodType): MethodHandle = { + kind match { + case MethodHandleInfo.REF_invokeStatic => + lookup.findStatic(owner, name, signature) + case MethodHandleInfo.REF_newInvokeSpecial => + lookup.findConstructor(owner, signature) + case MethodHandleInfo.REF_invokeVirtual | MethodHandleInfo.REF_invokeInterface => + lookup.findVirtual(owner, name, signature) + case MethodHandleInfo.REF_invokeSpecial => + lookup.findSpecial(owner, name, signature, owner) + } + } +} diff --git a/src/main/scala-2.13+/scala/compat/java8/StreamConverters.scala b/src/main/scala-2.13+/scala/compat/java8/StreamConverters.scala new file mode 100644 index 0000000..d977d9a --- /dev/null +++ b/src/main/scala-2.13+/scala/compat/java8/StreamConverters.scala @@ -0,0 +1,506 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +import java.util.stream._ + +import scala.annotation.{implicitNotFound, unused} +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} +import scala.collection.{IterableOnce, Stepper, StepperShape} +import scala.compat.java8.converterImpl._ +import scala.jdk.CollectionConverters._ +import scala.jdk._ +import scala.language.implicitConversions + +/** Defines extension methods to create Java Streams for Scala collections, available through + * [[scala.compat.java8.StreamConverters]]. 
+ */ +trait StreamExtensions { + implicit def richStepper[A](s: Stepper[A]): StepperExtensions[A] = new StepperExtensions[A](s) + + // collections + + implicit class IterableHasSeqStream[A](cc: IterableOnce[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def seqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = + s.fromStepper(cc.stepper, par = false) + } + + // Not `CC[X] <: IterableOnce[X]`, but `C` with an extra constraint, to support non-parametric classes like IntAccumulator + implicit class IterableNonGenericHasParStream[A, C <: IterableOnce[_]](c: C)(implicit ev: C <:< IterableOnce[A]) { + private type IterableOnceWithEfficientStepper = IterableOnce[A] { + def stepper[S <: Stepper[_]](implicit shape : StepperShape[A, S]) : S with EfficientSplit + } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def parStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[A, S, St], + st: StepperShape[A, St], + @implicitNotFound("`parStream` can only be called on collections where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: C <:< IterableOnceWithEfficientStepper): S = + s.fromStepper(ev(c).stepper, par = true) + } + + // maps + + implicit class MapHasSeqKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def seqKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[K, S, St], st: StepperShape[K, St]): S = + s.fromStepper(cc.keyStepper, par = false) + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def seqValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[V, S, St], st: StepperShape[V, St]): S = + s.fromStepper(cc.valueStepper, par = false) + + // The seqStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def seqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[(K, V), S, St], st: StepperShape[(K, V), St]): S = + s.fromStepper(cc.stepper, par = false) + } + + + implicit class MapHasParKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + private type MapOpsWithEfficientKeyStepper = collection.MapOps[K, V, collection.Map, _] { def keyStepper[S <: Stepper[_]](implicit shape : StepperShape[K, S]) : S with EfficientSplit } + private type MapOpsWithEfficientValueStepper = collection.MapOps[K, V, collection.Map, _] { def valueStepper[S <: Stepper[_]](implicit shape : StepperShape[V, S]) : S with EfficientSplit } + private type MapOpsWithEfficientStepper = collection.MapOps[K, V, collection.Map, _] { def stepper[S <: Stepper[_]](implicit shape : StepperShape[(K, V), S]) : S with EfficientSplit } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def parKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[K, S, St], + st: StepperShape[K, St], + @implicitNotFound("parKeyStream can only be called on maps where `keyStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientKeyStepper): S = + s.fromStepper(cc.keyStepper, par = true) + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def parValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[V, S, St], + st: StepperShape[V, St], + @implicitNotFound("parValueStream can only be called on maps where `valueStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientValueStepper): S = + s.fromStepper(cc.valueStepper, par = true) + + // The parStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def parStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[(K, V), S, St], + st: StepperShape[(K, V), St], + @implicitNotFound("parStream can only be called on maps where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientStepper): S = + s.fromStepper(cc.stepper, par = true) + } + + // steppers + + implicit class StepperHasSeqStream[A](stepper: Stepper[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def seqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], @unused st: StepperShape[A, St]): S = + s.fromStepper(stepper.asInstanceOf[St], par = false) + } + + implicit class StepperHasParStream[A](stepper: Stepper[A] with EfficientSplit) { + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def parStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], @unused st: StepperShape[A, St]): S = + s.fromStepper(stepper.asInstanceOf[St], par = true) + } + + // arrays + // uses the JDK array spliterators (`DoubleArraySpliterator`). users can also call + // `array.stepper.seqStream`, which then uses the Scala steppers (`DoubleArrayStepper`). the + // steppers are also available on byte/short/char/float arrays (`WidenedByteArrayStepper`), + // JDK spliterators only for double/int/long/reference. + + implicit class DoubleArrayHasSeqParStream(a: Array[Double]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def seqStream: DoubleStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def parStream: DoubleStream = seqStream.parallel + } + + implicit class IntArrayHasSeqParStream(a: Array[Int]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def seqStream: IntStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def parStream: IntStream = seqStream.parallel + } + + implicit class LongArrayHasSeqParStream(a: Array[Long]) { + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for this array. 
*/ + def seqStream: LongStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for this array. */ + def parStream: LongStream = seqStream.parallel + } + + implicit class AnyArrayHasSeqParStream[A <: AnyRef](a: Array[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this array. */ + def seqStream: Stream[A] = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this array. */ + def parStream: Stream[A] = seqStream.parallel + } + + implicit class ByteArrayHasSeqParStream(a: Array[Byte]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def seqStream: IntStream = a.stepper.seqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def parStream: IntStream = a.stepper.parStream + } + + implicit class ShortArrayHasSeqParStream(a: Array[Short]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def seqStream: IntStream = a.stepper.seqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def parStream: IntStream = a.stepper.parStream + } + + implicit class CharArrayHasSeqParStream(a: Array[Char]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def seqStream: IntStream = a.stepper.seqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def parStream: IntStream = a.stepper.parStream + } + + implicit class FloatArrayHasSeqParStream(a: Array[Float]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def seqStream: DoubleStream = a.stepper.seqStream + + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. 
*/ + def parStream: DoubleStream = a.stepper.parStream + } + + // toScala for streams + + implicit class StreamHasToScala[A](stream: Stream[A]) { + def accumulate: AnyAccumulator[A] = toScalaFactory(Accumulator) + + + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScalaFactory(Accumulator)` + * builds the result in parallel. + * + * A `toScalaFactory(Accumulator)` call automatically converts streams of boxed integers, longs or + * doubles are converted to the primitive accumulators ([[scala.jdk.IntAccumulator]], etc.). + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + private[java8] def toScalaFactory[C](factory: collection.Factory[A, C])(implicit info: AccumulatorFactoryInfo[A, C]): C = { + def anyAcc = stream.collect(AnyAccumulator.supplier[A], AnyAccumulator.adder[A], AnyAccumulator.merger[A]) + if (info.companion == AnyAccumulator) anyAcc.asInstanceOf[C] + else if (info.companion == IntAccumulator) stream.asInstanceOf[Stream[Int]].collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger).asInstanceOf[C] + else if (info.companion == LongAccumulator) stream.asInstanceOf[Stream[Long]].collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger).asInstanceOf[C] + else if (info.companion == DoubleAccumulator) stream.asInstanceOf[Stream[Double]].collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger).asInstanceOf[C] + else if (stream.isParallel) anyAcc.to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** + * Copy the elements of this stream into a Scala collection. + * + * For parallel streams, using [[accumulate]] is recommended as it builds the [[scala.jdk.Accumulator]] + * in parallel. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[CC[_]](implicit factory: collection.Factory[A, CC[A]]): CC[A] = { + if (stream.isParallel) toScalaFactory(Accumulator).to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** Convert a generic Java Stream wrapping a primitive type to a corresponding primitive + * Stream. 
+ */ + def unboxed[S](implicit unboxer: StreamUnboxer[A, S]): S = unboxer(stream) + } + + implicit class StreamIntHasAccumulatePrimitive(s: Stream[Int]) { + def accumulatePrimitive: IntAccumulator = s.toScalaFactory(Accumulator) + } + + implicit class StreamLongHasAccumulatePrimitive(s: Stream[Long]) { + def accumulatePrimitive: LongAccumulator = s.toScalaFactory(Accumulator) + } + + implicit class StreamDoubleHasAccumulatePrimitive(s: Stream[Double]) { + def accumulatePrimitive: DoubleAccumulator = s.toScalaFactory(Accumulator) + } + + implicit class StreamJIntegerHasAccumulatePrimitive(s: Stream[java.lang.Integer]) { + def accumulatePrimitive: IntAccumulator = s.toScalaFactory(Accumulator) + } + + implicit class StreamJLongHasAccumulatePrimitive(s: Stream[java.lang.Long]) { + def accumulatePrimitive: LongAccumulator = s.toScalaFactory(Accumulator) + } + + implicit class StreamJDoubleHasAccumulatePrimitive(s: Stream[java.lang.Double]) { + def accumulatePrimitive: DoubleAccumulator = s.toScalaFactory(Accumulator) + } + + implicit class IntStreamHasToScala(stream: IntStream) { + def accumulate: IntAccumulator = toScalaFactory(IntAccumulator) + + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScalaFactory(Accumulator)` + * builds the result in parallel. + * + * A `toScalaFactory(Accumulator)` call automatically converts the `IntStream` to a primitive + * [[scala.jdk.IntAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. 
If the target collection + * is lazy, the conversion is lazy as well. + */ + private[java8] def toScalaFactory[C](factory: collection.Factory[Int, C])(implicit info: AccumulatorFactoryInfo[Int, C]): C = { + def intAcc = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C] + else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C] + else if (stream.isParallel) intAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + + /** + * Copy the elements of this stream into a Scala collection. + * + * For parallel streams, using [[accumulate]] is recommended as it builds the [[scala.jdk.IntAccumulator]] + * in parallel. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[CC[_]](implicit factory: collection.Factory[Int, CC[Int]]): CC[Int] = { + if (stream.isParallel) toScalaFactory(IntAccumulator).to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + } + + implicit class LongStreamHasToScala(stream: LongStream) { + def accumulate: LongAccumulator = toScalaFactory(LongAccumulator) + + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScalaFactory(Accumulator)` + * builds the result in parallel. 
+ * + * A `toScalaFactory(Accumulator)` call automatically converts the `LongStream` to a primitive + * [[scala.jdk.LongAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + private[java8] def toScalaFactory[C](factory: collection.Factory[Long, C])(implicit info: AccumulatorFactoryInfo[Long, C]): C = { + def longAcc = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C] + else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C] + else if (stream.isParallel) longAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + + /** + * Copy the elements of this stream into a Scala collection. + * + * For parallel streams, using [[accumulate]] is recommended as it builds the [[scala.jdk.LongAccumulator]] + * in parallel. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + def toScala[CC[_]](implicit factory: collection.Factory[Long, CC[Long]]): CC[Long] = { + if (stream.isParallel) toScalaFactory(LongAccumulator).to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + } + + implicit class DoubleStreamHasToScala(stream: DoubleStream) { + def accumulate: DoubleAccumulator = toScalaFactory(DoubleAccumulator) + + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScalaFactory(Accumulator)` + * builds the result in parallel. + * + * A `toScalaFactory(Accumulator)` call automatically converts the `DoubleStream` to a primitive + * [[scala.jdk.DoubleAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + private[java8] def toScalaFactory[C](factory: collection.Factory[Double, C])(implicit info: AccumulatorFactoryInfo[Double, C]): C = { + def doubleAcc = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C] + else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C] + else if (stream.isParallel) doubleAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + + /** + * Copy the elements of this stream into a Scala collection. 
+ * + * For parallel streams, using [[accumulate]] is recommended as it builds the [[scala.jdk.DoubleAccumulator]] + * in parallel. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[CC[_]](implicit factory: collection.Factory[Double, CC[Double]]): CC[Double] = { + if (stream.isParallel) toScalaFactory(DoubleAccumulator).to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + } +} + +/** `StreamConverters` provides extension methods and other functionality to + * ease interoperability of Scala collections with `java.util.stream` classes. + * + * Scala collections gain extension methods `seqStream` and + * `parStream` that allow them to be used as the source of a `Stream`. + * Some collections either intrinsically cannot be paralellized, or + * could be but an efficient implementation is missing. It this case, + * only `seqStream` is provided. If a collection cannot be stepped over + * at all (e.g. `Traversable`), then it gains neither method. + * + * `Array` also gains `seqStream` and `parStream` methods, and calling those + * on `Array[Double]`, `Array[Int]`, or `Array[Long]` will produce the + * corresponding primitive stream. + * + * Streams gain `accumulate` and `toScala[_]` methods, which collect the stream + * into a custom high-performance `scala.collection.mutable.java8.Accumulator`, + * which is not part of the standard collections hierarchy, or into a named + * Scala collection, respectively. 
+ * + * Generic streams also gain an `unboxed` method that will convert to the + * corresponding unboxed primitive stream, if appropriate. Unboxed streams + * have custom accumulators with improved performance. + * + * Accumulators have `toArray`, `toList`, `iterator`, and `to[_]` methods + * to convert to standard Scala collections. Note that if you wish to + * create an array from a `Stream`, going through an `Accumulator` is + * not the most efficient option: just create the `Array` directly. + * + * Internally, Scala collections implement a hybrid of `Iterator` and + * `java.util.Spliterator` to implement `Stream` compatibility; these + * are called `Stepper`s. In particular, they can test for the presence + * of a next element using `hasStep`, can retrieve the next value with + * `nextStep`, or can optionally retrieve and operate on a value if present + * with `tryStep`, which works like `tryAdvance` in `java.util.Spliterator`. + * + * Every Scala collection that can be stepped + * through has a `stepper` method implicitly provided. In addition, + * maps have `keyStepper` and `valueStepper` methods. A limited number + * of collections operations are defined on `Stepper`s, including conversion + * to Scala collections with `to` or accumulation via `accumulate`. + * `Stepper`s also implement `seqStream` and `parStream` to generate `Stream`s. + * These are provided regardless of whether a `Stepper` can efficiently + * subdivide itself for parallel processing (though one can check for the + * presence of the `EfficientSubstep` trait to know that parallel execution will + * not be limited by long sequential searching steps, and one can call + * `anticipateParallelism` to warn a `Stepper` that it will be used in a parallel + * context and thus may wish to make different tradeoffs). 
+ * + * Examples: + * {{{ + * import scala.compat.java8.StreamConverters._ + * + * val s = Vector(1,2,3,4).parStream // Stream[Int] + * val si = s.unboxed // Stream.OfInt + * val ai = si.accumulate // IntAccumulator + * val v = ai.to[Vector] // Vector[Int] again + * + * val t = Array(2.0, 3.0, 4.0).parStream // DoubleStream + * val q = t.toScala[scala.collection.immutable.Queue] // Queue[Double] + * + * val x = List(1L, 2L, 3L, 4L).stepper.parStream.sum // 10, potentially computed in parallel + * }}} + */ +object StreamConverters +extends StreamExtensions +with converterImpl.Priority1AccumulatorConverters +{ + implicit def richIntStepper(s: Stepper[Int]): StepperExtensions[Int] = new StepperExtensions[Int](s) + implicit def richLongStepper(s: Stepper[Long]): StepperExtensions[Long] = new StepperExtensions[Long](s) + implicit def richDoubleStepper(s: Stepper[Double]): StepperExtensions[Double] = new StepperExtensions[Double](s) +} diff --git a/src/main/scala-2.13+/scala/compat/java8/collectionImpl/package.scala b/src/main/scala-2.13+/scala/compat/java8/collectionImpl/package.scala new file mode 100644 index 0000000..ea891ab --- /dev/null +++ b/src/main/scala-2.13+/scala/compat/java8/collectionImpl/package.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +package object collectionImpl { + type Accumulator[A] = scala.jdk.AnyAccumulator[A] + val Accumulator = scala.jdk.AnyAccumulator + + type IntAccumulator = scala.jdk.IntAccumulator + val IntAccumulator = scala.jdk.IntAccumulator + + type LongAccumulator = scala.jdk.LongAccumulator + val LongAccumulator = scala.jdk.LongAccumulator + + type DoubleAccumulator = scala.jdk.DoubleAccumulator + val DoubleAccumulator = scala.jdk.DoubleAccumulator + + type Stepper[A] = scala.collection.Stepper[A] + val Stepper = scala.collection.Stepper + + type AnyStepper[A] = scala.collection.AnyStepper[A] + val AnyStepper = scala.collection.AnyStepper + + type IntStepper = scala.collection.IntStepper + val IntStepper = scala.collection.IntStepper + + type LongStepper = scala.collection.LongStepper + val LongStepper = scala.collection.LongStepper + + type DoubleStepper = scala.collection.DoubleStepper + val DoubleStepper = scala.collection.DoubleStepper +} diff --git a/src/main/scala-2.13+/scala/compat/java8/converterImpl/Accumulates.scala b/src/main/scala-2.13+/scala/compat/java8/converterImpl/Accumulates.scala new file mode 100644 index 0000000..bb49435 --- /dev/null +++ b/src/main/scala-2.13+/scala/compat/java8/converterImpl/Accumulates.scala @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ + +final class CollectionCanAccumulate[A](private val underlying: IterableOnce[A]) extends AnyVal { + def accumulate: Accumulator[A] = underlying.iterator.to(Accumulator) +} + +final class AccumulateDoubleCollection(private val underlying: IterableOnce[Double]) extends AnyVal { + def accumulate: DoubleAccumulator = underlying.iterator.to(DoubleAccumulator) +} + +final class AccumulateIntCollection(private val underlying: IterableOnce[Int]) extends AnyVal { + def accumulate: IntAccumulator = underlying.iterator.to(IntAccumulator) +} + +final class AccumulateLongCollection(private val underlying: IterableOnce[Long]) extends AnyVal { + def accumulate: LongAccumulator = underlying.iterator.to(LongAccumulator) +} + +final class AccumulateAnyArray[A](private val underlying: Array[A]) extends AnyVal { + def accumulate: Accumulator[A] = underlying.to(Accumulator) +} + +final class AccumulateDoubleArray(private val underlying: Array[Double]) extends AnyVal { + def accumulate: DoubleAccumulator = underlying.to(DoubleAccumulator) +} + +final class AccumulateIntArray(private val underlying: Array[Int]) extends AnyVal { + def accumulate: IntAccumulator = underlying.to(IntAccumulator) +} + +final class AccumulateLongArray(private val underlying: Array[Long]) extends AnyVal { + def accumulate: LongAccumulator = underlying.to(LongAccumulator) +} diff --git a/src/main/scala-2.13+/scala/compat/java8/converterImpl/AccumulatorConverters.scala b/src/main/scala-2.13+/scala/compat/java8/converterImpl/AccumulatorConverters.scala new file mode 100644 index 0000000..dc40b40 --- /dev/null +++ b/src/main/scala-2.13+/scala/compat/java8/converterImpl/AccumulatorConverters.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.language.implicitConversions + +trait Priority3AccumulatorConverters { + implicit def collectionCanAccumulate[A](underlying: IterableOnce[A]): CollectionCanAccumulate[A] = + new CollectionCanAccumulate[A](underlying) +} + +trait Priority2AccumulatorConverters extends Priority3AccumulatorConverters { + implicit def accumulateDoubleCollection(underlying: IterableOnce[Double]): AccumulateDoubleCollection = + new AccumulateDoubleCollection(underlying) + implicit def accumulateIntCollection(underlying: IterableOnce[Int]): AccumulateIntCollection = + new AccumulateIntCollection(underlying) + implicit def accumulateLongCollection(underlying: IterableOnce[Long]): AccumulateLongCollection = + new AccumulateLongCollection(underlying) + implicit def accumulateAnyArray[A](underlying: Array[A]): AccumulateAnyArray[A] = + new AccumulateAnyArray(underlying) +} + +trait Priority1AccumulatorConverters extends Priority2AccumulatorConverters { + implicit def accumulateDoubleArray(underlying: Array[Double]): AccumulateDoubleArray = + new AccumulateDoubleArray(underlying) + implicit def accumulateIntArray(underlying: Array[Int]): AccumulateIntArray = + new AccumulateIntArray(underlying) + implicit def accumulateLongArray(underlying: Array[Long]): AccumulateLongArray = + new AccumulateLongArray(underlying) +} diff --git a/src/main/scala-2.13+/scala/compat/java8/converterImpl/StepperExtensions.scala b/src/main/scala-2.13+/scala/compat/java8/converterImpl/StepperExtensions.scala new file mode 100644 index 0000000..12ac05e --- /dev/null +++ b/src/main/scala-2.13+/scala/compat/java8/converterImpl/StepperExtensions.scala @@ -0,0 +1,89 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.collection.convert.StreamExtensions.AccumulatorFactoryInfo +import scala.compat.java8.collectionImpl.{DoubleAccumulator, IntAccumulator, LongAccumulator, Stepper} +import scala.jdk.AnyAccumulator + +class StepperExtensions[@specialized(Double, Int, Long) A](private val s: Stepper[A]) { + def accumulate[C](implicit info: AccumulatorFactoryInfo[A, C]): C = { + info.companion match { + case IntAccumulator => + val a = new IntAccumulator() + val is = s.asInstanceOf[Stepper[Int]] + while (is.hasStep) a += is.nextStep() + a.asInstanceOf[C] + case LongAccumulator => + val a = new LongAccumulator() + val is = s.asInstanceOf[Stepper[Long]] + while (is.hasStep) a += is.nextStep() + a.asInstanceOf[C] + case DoubleAccumulator => + val a = new DoubleAccumulator() + val is = s.asInstanceOf[Stepper[Double]] + while (is.hasStep) a += is.nextStep() + a.asInstanceOf[C] + case AnyAccumulator | null => + val a = new AnyAccumulator[A] + while (s.hasStep) a += s.nextStep() + a.asInstanceOf[C] + } + } + + def substep(): Stepper[A] = s.trySplit() + + /** Consumes all remaining elements in this `Stepper` and counts how many there are. + * This is a terminal operation. + */ + def count(): Long = { var n = 0L; while (s.hasStep) { s.nextStep(); n += 1 }; n } + + /** Consumes all remaining elements in this `Stepper` and counts how many satisfy condition `p`. + * This is a terminal operation. + */ + def count(p: A => Boolean): Long = { var n = 0L; while (s.hasStep) { if (p(s.nextStep())) n += 1 }; n } + + /** Searches for an element that satisfies condition `p`. If none are found, it returns `false`. + * This is a terminal operation. 
+ */ + def exists(p: A => Boolean): Boolean = { while(s.hasStep) { if (p(s.nextStep())) return true }; false } + + /** Searches for an element that satisfies condition `p`, returning it wrapped in `Some` if one is found, or `None` otherwise. + * This is a terminal operation. + */ + def find(p: A => Boolean): Option[A] = { while (s.hasStep) { val a = s.nextStep(); if (p(a)) return Some(a) }; None } + + /** Repeatedly applies `op` to propagate an initial value `zero` through all elements of the collection. + * Traversal order is left-to-right. + * This is a terminal operation. + */ + def fold[@specialized(Double, Int, Long) B](zero: B)(op: (B, A) => B) = { var b = zero; while (s.hasStep) { b = op(b, s.nextStep()) }; b } + + /** Repeatedly applies `op` to propagate an initial value `zero` through the collection until a condition `p` is met. + * If `p` is never met, the result of the last operation is returned. + * This is a terminal operation. + */ + def foldTo[@specialized(Double, Int, Long) B](zero: B)(op: (B, A) => B)(p: B => Boolean) = { var b = zero; while (!p(b) && s.hasStep) { b = op(b, s.nextStep()) }; b } + + /** Applies `f` to every remaining element in the collection. + * This is a terminal operation. + */ + def foreach(f: A => Unit): Unit = { while (s.hasStep) f(s.nextStep()) } + + /** Repeatedly merges elements with `op` until only a single element remains. + * Throws an exception if the `Stepper` is empty. + * Merging occurs from left to right. + * This is a terminal operation. 
+ */ + def reduce(op: (A, A) => A): A = { var a = s.nextStep(); while (s.hasStep) { a = op(a, s.nextStep()) }; a } +} diff --git a/src/main/scala-2.13+/scala/concurrent/java8/FuturesConvertersImplCompat.scala b/src/main/scala-2.13+/scala/concurrent/java8/FuturesConvertersImplCompat.scala new file mode 100644 index 0000000..2dc27fa --- /dev/null +++ b/src/main/scala-2.13+/scala/concurrent/java8/FuturesConvertersImplCompat.scala @@ -0,0 +1,20 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.concurrent.java8 + +import scala.concurrent.ExecutionContext + +// TODO: make this private[scala] when genjavadoc allows for that. +object FuturesConvertersImplCompat { + def InternalCallbackExecutor = ExecutionContext.parasitic +} diff --git a/src/main/scala-2.13-/scala/compat/java8/SpliteratorConverters.scala b/src/main/scala-2.13-/scala/compat/java8/SpliteratorConverters.scala new file mode 100644 index 0000000..b044b94 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/SpliteratorConverters.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import language.implicitConversions + +import java.util._ +import scala.compat.java8.collectionImpl._ + +package SpliteratorConverters { + class SpliteratorToStepper[A] private[java8] (private val underlying: Spliterator[A]) extends AnyVal { + def stepper: AnyStepper[A] = Stepper.ofSpliterator(underlying) + } + + trait Priority2SpliteratorConverters { + implicit def spliteratorToStepper[A](sp: Spliterator[A]) = new SpliteratorToStepper[A](sp) + } +} + + +package object SpliteratorConverters extends SpliteratorConverters.Priority2SpliteratorConverters { + implicit final class SpliteratorOfDoubleToStepper(private val underlying: Spliterator.OfDouble) extends AnyVal { + def stepper: DoubleStepper = Stepper.ofSpliterator(underlying) + } + implicit final class SpliteratorOfIntToStepper(private val underlying: Spliterator.OfInt) extends AnyVal { + def stepper: IntStepper = Stepper.ofSpliterator(underlying) + } + implicit final class SpliteratorOfLongToStepper(private val underlying: Spliterator.OfLong) extends AnyVal { + def stepper: LongStepper = Stepper.ofSpliterator(underlying) + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/StreamConverters.scala b/src/main/scala-2.13-/scala/compat/java8/StreamConverters.scala new file mode 100644 index 0000000..8d63d90 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/StreamConverters.scala @@ -0,0 +1,338 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import scala.language.higherKinds + +import java.util.stream._ +import scala.compat.java8.collectionImpl._ +import scala.compat.java8.converterImpl._ + +/** Classes or objects implementing this trait create streams suitable for sequential use */ +trait MakesSequentialStream[T, SS <: java.util.stream.BaseStream[_, SS]] extends Any { + def seqStream: SS +} + +/** Classes or objects implementing this trait create streams suitable for parallel use */ +trait MakesParallelStream[T, SS <: java.util.stream.BaseStream[_, SS]] extends Any { + def parStream: SS +} + +sealed trait StreamShape[T, S <: BaseStream[_, S]] { + def fromStepper (mk: MakesStepper[T, _], par: Boolean): S + def fromKeyStepper (mk: MakesKeyValueStepper[T, _, _], par: Boolean): S + def fromValueStepper(mk: MakesKeyValueStepper[_, T, _], par: Boolean): S +} +object StreamShape extends StreamShapeLowPriority { + // primitive + implicit val IntValue = intStreamShape[Int] + implicit val LongValue = longStreamShape[Long] + implicit val DoubleValue = doubleStreamShape[Double] + + // widening + implicit val ByteValue = intStreamShape[Byte] + implicit val ShortValue = intStreamShape[Short] + implicit val CharValue = intStreamShape[Char] + implicit val FloatValue = doubleStreamShape[Float] +} +trait StreamShapeLowPriority { + protected[this] abstract class BaseStreamShape[T, S <: BaseStream[_, S], St <: Stepper[_]](implicit ss: StepperShape[T, St]) extends StreamShape[T, S] { + final def fromStepper (mk: MakesStepper[T, _], par: Boolean): S = stream(mk.stepper, par) + final def fromKeyStepper (mk: MakesKeyValueStepper[T, _, _], par: Boolean): S = stream(mk.keyStepper, par) + final def fromValueStepper(mk: MakesKeyValueStepper[_, T, _], par: Boolean): S = stream(mk.valueStepper, par) + @inline private[this] def stream(st: St, par: Boolean): S = mkStream(if(par) st.anticipateParallelism else st, par) + protected[this] def mkStream(st: St, par: Boolean): S + } + protected[this] 
def intStreamShape[T](implicit ss: StepperShape[T, IntStepper]): StreamShape[T, IntStream] = new BaseStreamShape[T, IntStream, IntStepper] { + protected[this] def mkStream(st: IntStepper, par: Boolean): IntStream = StreamSupport.intStream(st, par) + } + protected[this] def longStreamShape[T](implicit ss: StepperShape[T, LongStepper]): StreamShape[T, LongStream] = new BaseStreamShape[T, LongStream, LongStepper] { + protected[this] def mkStream(st: LongStepper, par: Boolean): LongStream = StreamSupport.longStream(st, par) + } + protected[this] def doubleStreamShape[T](implicit ss: StepperShape[T, DoubleStepper]): StreamShape[T, DoubleStream] = new BaseStreamShape[T, DoubleStream, DoubleStepper] { + protected[this] def mkStream(st: DoubleStepper, par: Boolean): DoubleStream = StreamSupport.doubleStream(st, par) + } + + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T]] = new BaseStreamShape[T, Stream[T], AnyStepper[T]] { + protected[this] def mkStream(st: AnyStepper[T], par: Boolean): Stream[T] = StreamSupport.stream(st, par) + } +} + +trait PrimitiveStreamAccumulator[S, AA] { + def streamAccumulate(stream: S): AA +} + +trait PrimitiveStreamUnboxer[A, S] { + def apply(boxed: Stream[A]): S +} + +trait Priority2StreamConverters { + implicit class EnrichAnySteppableWithParStream[A, S <: BaseStream[_, S], CC](cc: CC)(implicit steppize: CC => MakesStepper[A, EfficientSubstep], ss: StreamShape[A, S]) + extends MakesParallelStream[A, S] { + def parStream: S = ss.fromStepper(steppize(cc), true) + } + implicit class EnrichAnySteppableWithSeqStream[A, S <: BaseStream[_, S], CC](cc: CC)(implicit steppize: CC => MakesStepper[A, Any], ss: StreamShape[A, S]) + extends MakesSequentialStream[A, S] { + def seqStream: S = ss.fromStepper(steppize(cc), false) + } + implicit class EnrichAnySteppableWithParKeyStream[A, S <: BaseStream[_, S], CC](cc: CC)(implicit steppize: CC => MakesKeyValueStepper[A, _, EfficientSubstep], ss: StreamShape[A, S]) { + def parKeyStream: 
S = ss.fromKeyStepper(steppize(cc), true) + } + implicit class EnrichScalaCollectionWithSeqKeyStream[A, S <: BaseStream[_, S], CC](cc: CC)(implicit steppize: CC => MakesKeyValueStepper[A, _, Any], ss: StreamShape[A, S]) { + def seqKeyStream: S = ss.fromKeyStepper(steppize(cc), false) + } + implicit class EnrichAnySteppableWithParValueStream[A, S <: BaseStream[_, S], CC](cc: CC)(implicit steppize: CC => MakesKeyValueStepper[_, A, EfficientSubstep], ss: StreamShape[A, S]) { + def parValueStream: S = ss.fromValueStepper(steppize(cc), true) + } + implicit class EnrichScalaCollectionWithSeqValueStream[A, S <: BaseStream[_, S], CC](cc: CC)(implicit steppize: CC => MakesKeyValueStepper[_, A, Any], ss: StreamShape[A, S]) { + def seqValueStream: S = ss.fromValueStepper(steppize(cc), false) + } +} + +trait Priority1StreamConverters extends Priority2StreamConverters { + implicit class RichStream[A](stream: Stream[A]) { + def accumulate = stream.collect(Accumulator.supplier[A], Accumulator.adder[A], Accumulator.merger[A]) + + def toScala[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, A, Coll[A]]): Coll[A] = { + if (stream.isParallel) accumulate.to[Coll](cbf) + else { + val b = cbf() + stream.forEachOrdered(new java.util.function.Consumer[A]{ def accept(a: A): Unit = { b += a } }) + b.result() + } + } + + def unboxed[S](implicit ubx: PrimitiveStreamUnboxer[A, S]): S = ubx(stream) + } + + implicit class RichStreamCanAccumulatePrimitive[S](stream: S) { + def accumulatePrimitive[AA](implicit psa: PrimitiveStreamAccumulator[S, AA]) = psa.streamAccumulate(stream) + } +} + +/** `StreamConverters` provides extension methods and other functionality to + * ease interoperability of Scala collections with `java.util.stream` classes. + * + * Scala collections gain extension methods `seqStream` and + * `parStream` that allow them to be used as the source of a `Stream`. 
+ * Some collections either intrinsically cannot be parallelized, or + * could be but an efficient implementation is missing. In this case, + * only `seqStream` is provided. If a collection cannot be stepped over + * at all (e.g. `Traversable`), then it gains neither method. + * + * `Array` also gains `seqStream` and `parStream` methods, and calling those + * on `Array[Double]`, `Array[Int]`, or `Array[Long]` will produce the + * corresponding primitive stream. + * + * Streams gain `accumulate` and `toScala[_]` methods, which collect the stream + * into a custom high-performance `scala.collection.mutable.java8.Accumulator`, + * which is not part of the standard collections hierarchy, or into a named + * Scala collection, respectively. + * + * Generic streams also gain an `unboxed` method that will convert to the + * corresponding unboxed primitive stream, if appropriate. Unboxed streams + * have custom accumulators with improved performance. + * + * Accumulators have `toArray`, `toList`, `iterator`, and `to[_]` methods + * to convert to standard Scala collections. Note that if you wish to + * create an array from a `Stream`, going through an `Accumulator` is + * not the most efficient option: just create the `Array` directly. + * + * Internally, Scala collections implement a hybrid of `Iterator` and + * `java.util.Spliterator` to implement `Stream` compatibility; these + * are called `Stepper`s. In particular, they can test for the presence + * of a next element using `hasStep`, can retrieve the next value with + * `nextStep`, or can optionally retrieve and operate on a value if present + * with `tryStep`, which works like `tryAdvance` in `java.util.Spliterator`. + * + * Every Scala collection that can be stepped + * through has a `stepper` method implicitly provided. In addition, + * maps have `keyStepper` and `valueStepper` methods.
A limited number + * of collections operations are defined on `Stepper`s, including conversion + * to Scala collections with `to` or accumulation via `accumulate`. + * `Stepper`s also implement `seqStream` and `parStream` to generate `Stream`s. + * These are provided regardless of whether a `Stepper` can efficiently + * subdivide itself for parallel processing (though one can check for the + * presence of the `EfficientSubstep` trait to know that parallel execution will + * not be limited by long sequential searching steps, and one can call + * `anticipateParallelism` to warn a `Stepper` that it will be used in a parallel + * context and thus may wish to make different tradeoffs). + * + * Examples: + * {{{ + * import scala.compat.java8.StreamConverters._ + * + * val s = Vector(1,2,3,4).parStream // Stream[Int] + * val si = s.unboxed // Stream.OfInt + * val ai = si.accumulate // IntAccumulator + * val v = ai.to[Vector] // Vector[Int] again + * + * val t = Array(2.0, 3.0, 4.0).parStream // DoubleStream + * val q = t.toScala[scala.collection.immutable.Queue] // Queue[Double] + * + * val x = List(1L, 2L, 3L, 4L).stepper.parStream.sum // 10, potentially computed in parallel + * }}} + */ +object StreamConverters +extends Priority1StreamConverters +with converterImpl.Priority1StepConverters +with converterImpl.Priority1AccumulatorConverters +{ + implicit final class EnrichDoubleArrayWithStream(private val a: Array[Double]) + extends AnyVal with MakesSequentialStream[Double, DoubleStream] with MakesParallelStream[Double, DoubleStream] { + def seqStream: DoubleStream = java.util.Arrays.stream(a) + def parStream: DoubleStream = seqStream.parallel + } + + implicit final class EnrichIntArrayWithStream(private val a: Array[Int]) + extends AnyVal with MakesSequentialStream[Int, IntStream] with MakesParallelStream[Int, IntStream] { + def seqStream: IntStream = java.util.Arrays.stream(a) + def parStream: IntStream = seqStream.parallel + } + + implicit final class 
EnrichLongArrayWithStream(private val a: Array[Long]) + extends AnyVal with MakesSequentialStream[Long, LongStream] with MakesParallelStream[Long, LongStream] { + def seqStream: LongStream = java.util.Arrays.stream(a) + def parStream: LongStream = seqStream.parallel + } + + implicit final class EnrichDoubleWrappedArrayWithStream(private val a: collection.mutable.WrappedArray[Double]) + extends AnyVal with MakesSequentialStream[Double, DoubleStream] with MakesParallelStream[Double, DoubleStream] { + def seqStream: DoubleStream = java.util.Arrays.stream(a.array) + def parStream: DoubleStream = seqStream.parallel + } + + implicit final class EnrichIntWrappedArrayWithStream(private val a: collection.mutable.WrappedArray[Int]) + extends AnyVal with MakesSequentialStream[Int, IntStream] with MakesParallelStream[Int, IntStream] { + def seqStream: IntStream = java.util.Arrays.stream(a.array) + def parStream: IntStream = seqStream.parallel + } + + implicit final class EnrichLongWrappedArrayWithStream(private val a: collection.mutable.WrappedArray[Long]) + extends AnyVal with MakesSequentialStream[Long, LongStream] with MakesParallelStream[Long, LongStream] { + def seqStream: LongStream = java.util.Arrays.stream(a.array) + def parStream: LongStream = seqStream.parallel + } + + implicit val primitiveAccumulateDoubleStream = new PrimitiveStreamAccumulator[Stream[Double], DoubleAccumulator] { + def streamAccumulate(stream: Stream[Double]): DoubleAccumulator = + stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger) + } + + implicit val primitiveAccumulateDoubleStream2 = + primitiveAccumulateDoubleStream.asInstanceOf[PrimitiveStreamAccumulator[Stream[java.lang.Double], DoubleAccumulator]] + + implicit val primitiveUnboxDoubleStream = new PrimitiveStreamUnboxer[Double, DoubleStream] { + def apply(boxed: Stream[Double]): DoubleStream = + boxed.mapToDouble(new java.util.function.ToDoubleFunction[Double]{ def applyAsDouble(d: Double) = d 
}) + } + + implicit val primitiveUnboxDoubleStream2 = + primitiveUnboxDoubleStream.asInstanceOf[PrimitiveStreamUnboxer[java.lang.Double, DoubleStream]] + + implicit val primitiveAccumulateIntStream = new PrimitiveStreamAccumulator[Stream[Int], IntAccumulator] { + def streamAccumulate(stream: Stream[Int]): IntAccumulator = + stream.collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger) + } + + implicit val primitiveAccumulateIntStream2 = + primitiveAccumulateIntStream.asInstanceOf[PrimitiveStreamAccumulator[Stream[java.lang.Integer], IntAccumulator]] + + implicit val primitiveUnboxIntStream = new PrimitiveStreamUnboxer[Int, IntStream] { + def apply(boxed: Stream[Int]): IntStream = + boxed.mapToInt(new java.util.function.ToIntFunction[Int]{ def applyAsInt(d: Int) = d }) + } + + implicit val primitiveUnboxIntStream2 = + primitiveUnboxIntStream.asInstanceOf[PrimitiveStreamUnboxer[java.lang.Integer, IntStream]] + + implicit val primitiveAccumulateLongStream = new PrimitiveStreamAccumulator[Stream[Long], LongAccumulator] { + def streamAccumulate(stream: Stream[Long]): LongAccumulator = + stream.collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger) + } + + implicit val primitiveAccumulateLongStream2 = + primitiveAccumulateLongStream.asInstanceOf[PrimitiveStreamAccumulator[Stream[java.lang.Long], LongAccumulator]] + + implicit val primitiveUnboxLongStream = new PrimitiveStreamUnboxer[Long, LongStream] { + def apply(boxed: Stream[Long]): LongStream = + boxed.mapToLong(new java.util.function.ToLongFunction[Long]{ def applyAsLong(d: Long) = d }) + } + + implicit val primitiveUnboxLongStream2 = + primitiveUnboxLongStream.asInstanceOf[PrimitiveStreamUnboxer[java.lang.Long, LongStream]] + + implicit final class RichDoubleStream(private val stream: DoubleStream) extends AnyVal { + def accumulate = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + + def 
toScala[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Double, Coll[Double]]): Coll[Double] = { + if (stream.isParallel) accumulate.to[Coll](cbf) + else { + val b = cbf() + stream.forEachOrdered(new java.util.function.DoubleConsumer{ def accept(d: Double): Unit = { b += d } }) + b.result() + } + } + } + + implicit final class RichIntStream(private val stream: IntStream) extends AnyVal { + def accumulate = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + + def toScala[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Int, Coll[Int]]): Coll[Int] = { + if (stream.isParallel) accumulate.to[Coll](cbf) + else { + val b = cbf() + stream.forEachOrdered(new java.util.function.IntConsumer{ def accept(d: Int): Unit = { b += d } }) + b.result() + } + } + } + + implicit final class RichLongStream(private val stream: LongStream) extends AnyVal { + def accumulate = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + + def toScala[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Long, Coll[Long]]): Coll[Long] = { + if (stream.isParallel) accumulate.to[Coll](cbf) + else { + val b = cbf() + stream.forEachOrdered(new java.util.function.LongConsumer{ def accept(d: Long): Unit = { b += d } }) + b.result() + } + } + } + + implicit val accumulateDoubleStepper = new AccumulatesFromStepper[Double, DoubleAccumulator] { + def apply(stepper: Stepper[Double]) = { + val a = new DoubleAccumulator + while (stepper.hasStep) a += stepper.nextStep + a + } + } + + implicit val accumulateIntStepper = new AccumulatesFromStepper[Int, IntAccumulator] { + def apply(stepper: Stepper[Int]) = { + val a = new IntAccumulator + while (stepper.hasStep) a += stepper.nextStep + a + } + } + + implicit val accumulateLongStepper = new AccumulatesFromStepper[Long, LongAccumulator] { + def apply(stepper: Stepper[Long]) = { + val a = new LongAccumulator + while (stepper.hasStep) a += stepper.nextStep 
+ a + } + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/collectionImpl/Accumulator.scala b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/Accumulator.scala new file mode 100644 index 0000000..1950291 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/Accumulator.scala @@ -0,0 +1,358 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.collectionImpl + +import scala.language.higherKinds + +/** An `Accumulator` is a low-level collection specialized for gathering + * elements in parallel and then joining them in order by merging Accumulators. + * Accumulators can contain more than `Int.MaxValue` elements. + */ +final class Accumulator[A] extends AccumulatorLike[A, Accumulator[A]] { self => + private[java8] var current: Array[AnyRef] = Accumulator.emptyAnyRefArray + private[java8] var history: Array[Array[AnyRef]] = Accumulator.emptyAnyRefArrayArray + private[java8] var cumul: Array[Long] = Accumulator.emptyLongArray + + private[java8] def cumulative(i: Int) = cumul(i) + + private def expand(): Unit = { + if (index > 0) { + if (hIndex >= history.length) hExpand() + history(hIndex) = current + cumul(hIndex) = (if (hIndex > 0) cumulative(hIndex-1) else 0) + index + hIndex += 1 + } + current = new Array[AnyRef](nextBlockSize) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) { + history = new Array[Array[AnyRef]](4) + cumul = new Array[Long](4) + } + else { + history = java.util.Arrays.copyOf(history, history.length << 1) + cumul = java.util.Arrays.copyOf(cumul, cumul.length << 1) + } + } + + /** Appends an element to this `Accumulator`. 
*/ + final def +=(a: A): Unit = { + totalSize += 1 + if (index >= current.length) expand() + current(index) = a.asInstanceOf[AnyRef] + index += 1 + } + + /** Removes all elements from `that` and appends them to this `Accumulator`. */ + final def drain[A1 <: A](that: Accumulator[A1]): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val n = (that.cumulative(h) - prev).toInt + if (current.length - index >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = that.cumulative(h) + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index >= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - java.lang.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + cumul = java.util.Arrays.copyOf(cumul, n) + } + var pv = (if (hIndex > 0) cumulative(hIndex-1) else 0L) + if (index > 0) { + pv += index + cumul(hIndex) = pv + history(hIndex) = (if (index < (current.length >>> 3) && current.length > 32) java.util.Arrays.copyOf(current, index) else current) + hIndex += 1 + } + while (h < that.hIndex) { + pv += that.cumulative(h) - prev + prev = that.cumulative(h) + cumul(hIndex) = pv + history(hIndex) = that.history(h) + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear + } + + override def clear(): Unit = { + super.clear() + current = Accumulator.emptyAnyRefArray + history = Accumulator.emptyAnyRefArrayArray + cumul = Accumulator.emptyLongArray + } + + /** Retrieves the `ix`th element. 
*/ + final def apply(ix: Long): A = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt).asInstanceOf[A] + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt).asInstanceOf[A] + } + } + + /** Retrieves the `ix`th element, using an `Int` index. */ + final def apply(i: Int): A = apply(i.toLong) + + /** Returns a `Stepper` over the contents of this `Accumulator`*/ + final def stepper: AnyStepper[A] = new AccumulatorStepper[A](this) + + /** Returns an `Iterator` over the contents of this `Accumulator`. */ + final def iterator = stepper.iterator + + /** Returns a `java.util.Spliterator` over the contents of this `Accumulator`*/ + final def spliterator: java.util.Spliterator[A] = stepper.spliterator + + /** Produces a sequential Java 8 Stream over the elements of this `Accumulator`*/ + final def seqStream: java.util.stream.Stream[A] = java.util.stream.StreamSupport.stream(spliterator, false) + + /** Produces a parallel Java 8 Stream over the elements of this `Accumulator`*/ + final def parStream: java.util.stream.Stream[A] = java.util.stream.StreamSupport.stream(spliterator, true) + + /** Copies the elements in this `Accumulator` into an `Array` */ + final def toArray(implicit tag: reflect.ClassTag[A]) = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[A](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val n = cumulative(h) - pv + pv = cumulative(h) + var i = 0 + while (i < n) { + a(j) = x(i).asInstanceOf[A] + i += 1 + j += 1 + } + h += 1 + } + var i = 0 + while (i < index) { + a(j) = current(i).asInstanceOf[A] + i += 1 + j += 1 + } + a + } + + /** Copies the elements in this `Accumulator` to a `List` */ + final def toList: List[A] = { + var ans: List[A] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i).asInstanceOf[A] :: ans + i -= 1 
+ } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i).asInstanceOf[A] :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** Copies the elements in this `Accumulator` to a specified collection. + * Usage example: `acc.to[Vector]` + */ + final def to[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, A, Coll[A]]): Coll[A] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + val b = cbf() + b.sizeHint(totalSize.toInt) + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val n = cumulative(h) - pv + pv = cumulative(h) + var i = 0 + while (i < n) { + b += x(i).asInstanceOf[A] + i += 1 + } + h += 1 + } + var i = 0 + while (i < index) { + b += current(i).asInstanceOf[A] + i += 1 + } + b.result + } +} + +object Accumulator { + private val emptyAnyRefArray = new Array[AnyRef](0) + private val emptyAnyRefArrayArray = new Array[Array[AnyRef]](0) + private val emptyLongArray = new Array[Long](0) + + /** A `Supplier` of `Accumulator`s, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def supplier[A] = new java.util.function.Supplier[Accumulator[A]]{ def get: Accumulator[A] = new Accumulator[A] } + + /** A `BiConsumer` that adds an element to an `Accumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def adder[A] = new java.util.function.BiConsumer[Accumulator[A], A]{ def accept(ac: Accumulator[A], a: A): Unit = { ac += a } } + + /** A `BiConsumer` that merges `Accumulator`s, suitable for use with `java.util.stream.Stream`'s `collect` method. 
*/ + def merger[A] = new java.util.function.BiConsumer[Accumulator[A], Accumulator[A]]{ def accept(a1: Accumulator[A], a2: Accumulator[A]): Unit = { a1 drain a2 } } + + /** Builds an `Accumulator` from any `TraversableOnce` */ + def from[A](source: TraversableOnce[A]) = { + val a = new Accumulator[A] + source.foreach(a += _) + a + } +} + +private[java8] class AccumulatorStepper[A](private val acc: Accumulator[A]) extends AnyStepper[A] { + import java.util.Spliterator._ + + private var h = 0 + private var i = 0 + private var a = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N = acc.totalSize + + private def duplicateSelf(limit: Long): AccumulatorStepper[A] = { + val ans = new AccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics() = ORDERED | SIZED | SUBSIZED + + def estimateSize = N + + def hasNext = N > 0 + + def next: A = + if (N <= 0) throw new NoSuchElementException("Next in empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i).asInstanceOf[A] + i += 1 + N -= 1 + ans + } + + // Overidden for efficiency + override def tryStep(f: A => Unit): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f(a(i).asInstanceOf[A]) + i += 1 + N -= 1 + true + } + + // Overidden for efficiency + override def tryAdvance(f: java.util.function.Consumer[_ >: A]): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f.accept(a(i).asInstanceOf[A]) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def foreach(f: A => Unit): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f(a(i).asInstanceOf[A]) + i += 1 + } + N -= 
(n - i0) + } + } + + // Overridden for efficiency + override def forEachRemaining(f: java.util.function.Consumer[_ >: A]): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f.accept(a(i).asInstanceOf[A]) + i += 1 + } + N -= (n - i0) + } + } + + def substep(): AnyStepper[A] = + if (N <= 1) null + else { + val half = (N >> 1) + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } + + override def toString = s"$h $i ${a.mkString("{",",","}")} $n $N" +} diff --git a/src/main/scala-2.13-/scala/compat/java8/collectionImpl/AccumulatorLike.scala b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/AccumulatorLike.scala new file mode 100644 index 0000000..c7e894f --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/AccumulatorLike.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.collectionImpl + +/** An accumulator that works with Java 8 streams; it accepts elements of type `A`, + * is itself an `AC`. Accumulators can handle more than `Int.MaxValue` elements. 
+ */ +trait AccumulatorLike[@specialized(Double, Int, Long) A, AC] { + private[java8] var index: Int = 0 + private[java8] var hIndex: Int = 0 + private[java8] var totalSize: Long = 0L + private[java8] def cumulative(i: Int): Long + + private[java8] def nextBlockSize: Int = { + if (totalSize < 32) 16 + else if (totalSize <= Int.MaxValue) { + val bit = (64 - java.lang.Long.numberOfLeadingZeros(totalSize)) + 1 << (bit - (bit >> 2)) + } + else 1 << 24 + } + + /** Size of the accumulated collection, as a `Long` */ + final def size = totalSize + + /** Remove all accumulated elements from this accumulator. */ + def clear(): Unit = { + index = 0 + hIndex = 0 + totalSize = 0L + } + + private[java8] def seekSlot(ix: Long): Long = { + var lo = -1 + var hi = hIndex + while (lo + 1 < hi) { + val m = (lo + hi) >>> 1 // Shift allows division-as-unsigned, prevents overflow + if (cumulative(m) > ix) hi = m + else lo = m + } + (hi.toLong << 32) | (if (hi==0) ix else (ix - cumulative(hi-1))).toInt + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/collectionImpl/DoubleAccumulator.scala b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/DoubleAccumulator.scala new file mode 100644 index 0000000..af70491 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/DoubleAccumulator.scala @@ -0,0 +1,353 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.collectionImpl + +import scala.language.higherKinds + +/** A `DoubleAccumulator` is a low-level collection specialized for gathering + * elements in parallel and then joining them in order by merging them. 
+ * This is a manually specialized variant of `Accumulator` with no actual + * subclassing relationship with `Accumulator`. + */ +final class DoubleAccumulator extends AccumulatorLike[Double, DoubleAccumulator] { self => + private[java8] var current: Array[Double] = DoubleAccumulator.emptyDoubleArray + private[java8] var history: Array[Array[Double]] = DoubleAccumulator.emptyDoubleArrayArray + + private[java8] def cumulative(i: Int) = { val x = history(i); x(x.length-1).toLong } + + private def expand(): Unit = { + if (index > 0) { + current(current.length-1) = (if (hIndex > 0) { val x = history(hIndex-1); x(x.length-1) } else 0) + index + if (hIndex >= history.length) hExpand() + history(hIndex) = current + hIndex += 1 + } + current = new Array[Double](nextBlockSize+1) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) history = new Array[Array[Double]](4) + else history = java.util.Arrays.copyOf(history, history.length << 1) + } + + /** Appends an element to this `DoubleAccumulator`. */ + final def +=(a: Double): Unit = { + totalSize += 1 + if (index+1 >= current.length) expand() + current(index) = a + index += 1 + } + + /** Removes all elements from `that` and appends them to this `DoubleAccumulator`. 
*/ + final def drain(that: DoubleAccumulator): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val cuml = that.cumulative(h) + val n = (cuml - prev).toInt + if (current.length - index - 1 >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = cuml + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index - 1>= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - java.lang.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + } + var pv = (if (hIndex > 0) cumulative(hIndex-1) else 0L) + if (index > 0) { + val x = + if (index < (current.length >>> 3) && current.length - 1 > 32) { + val ans = java.util.Arrays.copyOf(current, index + 1) + ans(ans.length - 1) = current(current.length - 1) + ans + } + else current + pv = pv + index + x(x.length - 1) = pv + history(hIndex) = x + hIndex += 1 + } + while (h < that.hIndex) { + val cuml = that.cumulative(h) + pv = pv + cuml - prev + prev = cuml + val x = that.history(h) + x(x.length - 1) = pv + history(hIndex) = x + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear + } + + override def clear(): Unit = { + super.clear() + current = DoubleAccumulator.emptyDoubleArray + history = DoubleAccumulator.emptyDoubleArrayArray + } + + /** Retrieves the `ix`th element. */ + final def apply(ix: Long): Double = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) + } + } + + /** Retrieves the `ix`th element, using an `Int` index. 
*/ + final def apply(i: Int): Double = apply(i.toLong) + + /** Returns a `DoubleStepper` over the contents of this `DoubleAccumulator`. */ + final def stepper: DoubleStepper = new DoubleAccumulatorStepper(this) + + /** Returns an `Iterator` over the contents of this `DoubleAccumulator`. The `Iterator` is not specialized. */ + final def iterator = stepper.iterator + + /** Returns a `java.util.Spliterator.OfDouble` over the contents of this `DoubleAccumulator`*/ + final def spliterator: java.util.Spliterator.OfDouble = stepper + + /** Produces a sequential Java 8 `DoubleStream` over the elements of this `DoubleAccumulator`*/ + final def seqStream: java.util.stream.DoubleStream = java.util.stream.StreamSupport.doubleStream(spliterator, false) + + /** Produces a parallel Java 8 `DoubleStream` over the elements of this `DoubleAccumulator`*/ + final def parStream: java.util.stream.DoubleStream = java.util.stream.StreamSupport.doubleStream(spliterator, true) + + /** Copies the elements in this `DoubleAccumulator` into an `Array[Double]` */ + final def toArray = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[Double](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = x(x.length-1).toLong + val n = (cuml - pv).toInt + pv = cuml + System.arraycopy(x, 0, a, j, n) + j += n + h += 1 + } + System.arraycopy(current, 0, a, j, index) + j += index + a + } + + /** Copies the elements in this `DoubleAccumulator` to a `List` */ + final def toList: List[Double] = { + var ans: List[Double] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i) :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i) :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** Copies the elements in this 
`DoubleAccumulator` to a specified collection. + * Note that the target collection is not specialized. + * Usage example: `acc.to[Vector]` + */ + final def to[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Double, Coll[Double]]): Coll[Double] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + val b = cbf() + b.sizeHint(totalSize.toInt) + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val n = cumulative(h) - pv + pv = cumulative(h) + var i = 0 + while (i < n) { + b += x(i) + i += 1 + } + h += 1 + } + var i = 0 + while (i < index) { + b += current(i) + i += 1 + } + b.result + } +} +object DoubleAccumulator { + private val emptyDoubleArray = new Array[Double](0) + private val emptyDoubleArrayArray = new Array[Array[Double]](0) + + /** A `Supplier` of `DoubleAccumulator`s, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. Suitable for `Stream[Double]` also. */ + def supplier = new java.util.function.Supplier[DoubleAccumulator]{ def get: DoubleAccumulator = new DoubleAccumulator } + + /** A `BiConsumer` that adds an element to an `Accumulator`, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. */ + def adder = new java.util.function.ObjDoubleConsumer[DoubleAccumulator]{ def accept(ac: DoubleAccumulator, a: Double): Unit = { ac += a } } + + /** A `BiConsumer` that adds a boxed `Double` to an `DoubleAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def boxedAdder = new java.util.function.BiConsumer[DoubleAccumulator, Double]{ def accept(ac: DoubleAccumulator, a: Double): Unit = { ac += a } } + + /** A `BiConsumer` that merges `DoubleAccumulator`s, suitable for use with `java.util.stream.DoubleStream`'s `collect` method. Suitable for `Stream[Double]` also. 
*/ + def merger = new java.util.function.BiConsumer[DoubleAccumulator, DoubleAccumulator]{ def accept(a1: DoubleAccumulator, a2: DoubleAccumulator): Unit = { a1 drain a2 } } + + /** Builds a `DoubleAccumulator` from any `Double`-valued `TraversableOnce` */ + def from[A](source: TraversableOnce[Double]) = { + val a = new DoubleAccumulator + source.foreach(a += _) + a + } +} + +private[java8] class DoubleAccumulatorStepper(private val acc: DoubleAccumulator) extends DoubleStepper { + import java.util.Spliterator._ + + private var h = 0 + private var i = 0 + private var a = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N = acc.totalSize + + private def duplicateSelf(limit: Long): DoubleAccumulatorStepper = { + val ans = new DoubleAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics() = ORDERED | SIZED | SUBSIZED | NONNULL + + def estimateSize = N + + def hasNext = N > 0 + + def nextDouble: Double = + if (N <= 0) throw new NoSuchElementException("next on empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i) + i += 1 + N -= 1 + ans + } + + // Overridden for efficiency + override def tryStep(f: Double => Unit): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def tryAdvance(f: java.util.function.DoubleConsumer): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f.accept(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def foreach(f: Double => Unit): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f(a(i)) 
+ i += 1 + } + N -= (n - i0) + } + } + + // Overridden for efficiency + override def forEachRemaining(f: java.util.function.DoubleConsumer): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + } + + def substep(): DoubleStepper = + if (N <= 1) null + else { + val half = (N >> 1) + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/collectionImpl/IntAccumulator.scala b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/IntAccumulator.scala new file mode 100644 index 0000000..f8599ef --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/IntAccumulator.scala @@ -0,0 +1,360 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.collectionImpl + +import scala.language.higherKinds + +/** A `IntAccumulator` is a low-level collection specialized for gathering + * elements in parallel and then joining them in order by merging them. + * This is a manually specialized variant of `Accumulator` with no actual + * subclassing relationship with `Accumulator`. 
+ */ +final class IntAccumulator extends AccumulatorLike[Int, IntAccumulator] { self => + private[java8] var current: Array[Int] = IntAccumulator.emptyIntArray + private[java8] var history: Array[Array[Int]] = IntAccumulator.emptyIntArrayArray + + private[java8] def cumulative(i: Int) = { val x = history(i); x(x.length-2).toLong << 32 | (x(x.length-1)&0xFFFFFFFFL) } + + private def expand(): Unit = { + if (index > 0) { + val cuml = (if (hIndex > 0) cumulative(hIndex-1) else 0) + index + current(current.length-2) = (cuml >>> 32).toInt + current(current.length-1) = (cuml & 0xFFFFFFFFL).toInt + if (hIndex >= history.length) hExpand() + history(hIndex) = current + hIndex += 1 + } + current = new Array[Int](nextBlockSize+1) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) history = new Array[Array[Int]](4) + else history = java.util.Arrays.copyOf(history, history.length << 1) + } + + /** Appends an element to this `IntAccumulator`. */ + final def +=(a: Int): Unit = { + totalSize += 1 + if (index+2 >= current.length) expand() + current(index) = a + index += 1 + } + + /** Removes all elements from `that` and appends them to this `IntAccumulator`. 
*/ + final def drain(that: IntAccumulator): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val cuml = that.cumulative(h) + val n = (cuml - prev).toInt + if (current.length - index - 2 >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = cuml + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index - 2 >= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - java.lang.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + } + var pv = (if (hIndex > 0) cumulative(hIndex-1) else 0L) + if (index > 0) { + val x = + if (index < (current.length >>> 3) && current.length - 1 > 32) { + val ans = java.util.Arrays.copyOf(current, index + 2) + ans(ans.length - 2) = current(current.length - 2) + ans(ans.length - 1) = current(current.length - 1) + ans + } + else current + pv = pv + index + x(x.length - 2) = (pv >>> 32).toInt + x(x.length - 1) = (pv & 0xFFFFFFFFL).toInt + history(hIndex) = x + hIndex += 1 + } + while (h < that.hIndex) { + val cuml = that.cumulative(h) + pv = pv + cuml - prev + prev = cuml + val x = that.history(h) + x(x.length - 2) = (pv >>> 32).toInt + x(x.length - 1) = (pv & 0xFFFFFFFFL).toInt + history(hIndex) = x + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear + } + + override def clear(): Unit = { + super.clear() + current = IntAccumulator.emptyIntArray + history = IntAccumulator.emptyIntArrayArray + } + + /** Retrieves the `ix`th element. 
*/ + final def apply(ix: Long): Int = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) + } + } + + /** Retrieves the `ix`th element, using an `Int` index. */ + final def apply(i: Int): Int = apply(i.toLong) + + /** Returns an `IntStepper` over the contents of this `IntAccumulator` */ + final def stepper: IntStepper = new IntAccumulatorStepper(this) + + /** Returns an `Iterator` over the contents of this `IntAccumulator`. The `Iterator` is not specialized. */ + final def iterator = stepper.iterator + + /** Returns a `java.util.Spliterator.OfInt` over the contents of this `IntAccumulator`*/ + final def spliterator: java.util.Spliterator.OfInt = stepper + + /** Produces a sequential Java 8 `IntStream` over the elements of this `IntAccumulator`*/ + final def seqStream: java.util.stream.IntStream = java.util.stream.StreamSupport.intStream(spliterator, false) + + /** Produces a parallel Java 8 `IntStream` over the elements of this `IntAccumulator`*/ + final def parStream: java.util.stream.IntStream = java.util.stream.StreamSupport.intStream(spliterator, true) + + /** Copies the elements in this `IntAccumulator` into an `Array[Int]` */ + final def toArray = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[Int](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = cumulative(h) + val n = (cuml - pv).toInt + pv = cuml + System.arraycopy(x, 0, a, j, n) + j += n + h += 1 + } + System.arraycopy(current, 0, a, j, index) + j += index + a + } + + /** Copies the elements in this `IntAccumulator` to a `List` */ + final def toList: List[Int] = { + var ans: List[Int] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i) :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = 
history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i) :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** Copies the elements in this `IntAccumulator` to a specified collection. + * Note that the target collection is not specialized. + * Usage example: `acc.to[Vector]` + */ + final def to[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Int, Coll[Int]]): Coll[Int] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + val b = cbf() + b.sizeHint(totalSize.toInt) + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = cumulative(h) + val n = cuml - pv + pv = cuml + var i = 0 + while (i < n) { + b += x(i) + i += 1 + } + h += 1 + } + var i = 0 + while (i < index) { + b += current(i) + i += 1 + } + b.result + } +} + +object IntAccumulator { + private val emptyIntArray = new Array[Int](0) + private val emptyIntArrayArray = new Array[Array[Int]](0) + + /** A `Supplier` of `IntAccumulator`s, suitable for use with `java.util.stream.IntStream`'s `collect` method. Suitable for `Stream[Int]` also. */ + def supplier = new java.util.function.Supplier[IntAccumulator]{ def get: IntAccumulator = new IntAccumulator } + + /** A `BiConsumer` that adds an element to an `Accumulator`, suitable for use with `java.util.stream.IntStream`'s `collect` method. */ + def adder = new java.util.function.ObjIntConsumer[IntAccumulator]{ def accept(ac: IntAccumulator, a: Int): Unit = { ac += a } } + + /** A `BiConsumer` that adds a boxed `Int` to an `IntAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def boxedAdder = new java.util.function.BiConsumer[IntAccumulator, Int]{ def accept(ac: IntAccumulator, a: Int): Unit = { ac += a } } + + /** A `BiConsumer` that merges `IntAccumulator`s, suitable for use with `java.util.stream.IntStream`'s `collect` method. 
Suitable for `Stream[Int]` also. */ + def merger = new java.util.function.BiConsumer[IntAccumulator, IntAccumulator]{ def accept(a1: IntAccumulator, a2: IntAccumulator): Unit = { a1 drain a2 } } + + /** Builds an `IntAccumulator` from any `Int`-valued `TraversableOnce` */ + def from[A](source: TraversableOnce[Int]) = { + val a = new IntAccumulator + source.foreach(a += _) + a + } +} + +private[java8] class IntAccumulatorStepper(private val acc: IntAccumulator) extends IntStepper { + import java.util.Spliterator._ + + private var h = 0 + private var i = 0 + private var a = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N = acc.totalSize + + private def duplicateSelf(limit: Long): IntAccumulatorStepper = { + val ans = new IntAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics() = ORDERED | SIZED | SUBSIZED | NONNULL + + def estimateSize = N + + def hasNext = N > 0 + + def nextInt: Int = + if (N <= 0) throw new NoSuchElementException("next on empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i) + i += 1 + N -= 1 + ans + } + + // Overridden for efficiency + override def tryStep(f: Int => Unit): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def tryAdvance(f: java.util.function.IntConsumer): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f.accept(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def foreach(f: Int => Unit): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f(a(i)) + i += 1 + } + N -= 
(n - i0) + } + } + + // Overridden for efficiency + override def forEachRemaining(f: java.util.function.IntConsumer): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + } + + def substep(): IntStepper = + if (N <= 1) null + else { + val half = (N >> 1) + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/collectionImpl/LongAccumulator.scala b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/LongAccumulator.scala new file mode 100644 index 0000000..0e688bf --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/LongAccumulator.scala @@ -0,0 +1,354 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.collectionImpl + +import scala.language.higherKinds + +/** A `LongAccumulator` is a low-level collection specialized for gathering + * elements in parallel and then joining them in order by merging them. + * This is a manually specialized variant of `Accumulator` with no actual + * subclassing relationship with `Accumulator`. 
+ */ +final class LongAccumulator extends AccumulatorLike[Long, LongAccumulator] { self => + private[java8] var current: Array[Long] = LongAccumulator.emptyLongArray + private[java8] var history: Array[Array[Long]] = LongAccumulator.emptyLongArrayArray + + private[java8] def cumulative(i: Int) = { val x = history(i); x(x.length-1) } + + private def expand(): Unit = { + if (index > 0) { + current(current.length-1) = (if (hIndex > 0) { val x = history(hIndex-1); x(x.length-1) } else 0) + index + if (hIndex >= history.length) hExpand() + history(hIndex) = current + hIndex += 1 + } + current = new Array[Long](nextBlockSize+1) + index = 0 + } + + private def hExpand(): Unit = { + if (hIndex == 0) history = new Array[Array[Long]](4) + else history = java.util.Arrays.copyOf(history, history.length << 1) + } + + /** Appends an element to this `LongAccumulator`. */ + final def +=(a: Long): Unit = { + totalSize += 1 + if (index+1 >= current.length) expand() + current(index) = a + index += 1 + } + + /** Removes all elements from `that` and appends them to this `LongAccumulator`. 
*/ + final def drain(that: LongAccumulator): Unit = { + var h = 0 + var prev = 0L + var more = true + while (more && h < that.hIndex) { + val cuml = that.cumulative(h) + val n = (cuml - prev).toInt + if (current.length - index - 1 >= n) { + System.arraycopy(that.history(h), 0, current, index, n) + prev = cuml + index += n + h += 1 + } + else more = false + } + if (h >= that.hIndex && current.length - index - 1>= that.index) { + if (that.index > 0) System.arraycopy(that.current, 0, current, index, that.index) + index += that.index + } + else { + val slots = (if (index > 0) 1 else 0) + that.hIndex - h + if (hIndex + slots > history.length) { + val n = math.max(4, 1 << (32 - java.lang.Integer.numberOfLeadingZeros(1 + hIndex + slots))) + history = java.util.Arrays.copyOf(history, n) + } + var pv = (if (hIndex > 0) cumulative(hIndex-1) else 0L) + if (index > 0) { + val x = + if (index < (current.length >>> 3) && current.length - 1 > 32) { + val ans = java.util.Arrays.copyOf(current, index + 1) + ans(ans.length - 1) = current(current.length - 1) + ans + } + else current + pv = pv + index + x(x.length - 1) = pv + history(hIndex) = x + hIndex += 1 + } + while (h < that.hIndex) { + val cuml = that.cumulative(h) + pv = pv + cuml - prev + prev = cuml + val x = that.history(h) + x(x.length - 1) = pv + history(hIndex) = x + h += 1 + hIndex += 1 + } + index = that.index + current = that.current + } + totalSize += that.totalSize + that.clear + } + + override def clear(): Unit = { + super.clear() + current = LongAccumulator.emptyLongArray + history = LongAccumulator.emptyLongArrayArray + } + + /** Retrieves the `ix`th element. */ + final def apply(ix: Long): Long = { + if (totalSize - ix <= index || hIndex == 0) current((ix - (totalSize - index)).toInt) + else { + val w = seekSlot(ix) + history((w >>> 32).toInt)((w & 0xFFFFFFFFL).toInt) + } + } + + /** Retrieves the `ix`th element, using an `Int` index. 
*/ + final def apply(i: Int): Long = apply(i.toLong) + + /** Returns a `LongStepper` over the contents of this `LongAccumulator`. */ + final def stepper: LongStepper = new LongAccumulatorStepper(this) + + /** Returns an `Iterator` over the contents of this `LongAccumulator`. The `Iterator` is not specialized. */ + final def iterator = stepper.iterator + + /** Returns a `java.util.Spliterator.OfLong` over the contents of this `LongAccumulator`*/ + final def spliterator: java.util.Spliterator.OfLong = stepper + + /** Produces a sequential Java 8 `LongStream` over the elements of this `LongAccumulator`*/ + final def seqStream: java.util.stream.LongStream = java.util.stream.StreamSupport.longStream(spliterator, false) + + /** Produces a parallel Java 8 `LongStream` over the elements of this `LongAccumulator`*/ + final def parStream: java.util.stream.LongStream = java.util.stream.StreamSupport.longStream(spliterator, true) + + /** Copies the elements in this `LongAccumulator` into an `Array[Long]` */ + final def toArray = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for an array: "+totalSize.toString) + val a = new Array[Long](totalSize.toInt) + var j = 0 + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val cuml = x(x.length-1) + val n = (cuml - pv).toInt + pv = cuml + System.arraycopy(x, 0, a, j, n) + j += n + h += 1 + } + System.arraycopy(current, 0, a, j, index) + j += index + a + } + + /** Copies the elements in this `LongAccumulator` to a `List` */ + final def toList: List[Long] = { + var ans: List[Long] = Nil + var i = index - 1 + while (i >= 0) { + ans = current(i) :: ans + i -= 1 + } + var h = hIndex - 1 + while (h >= 0) { + val a = history(h) + i = (cumulative(h) - (if (h == 0) 0L else cumulative(h-1))).toInt - 1 + while (i >= 0) { + ans = a(i) :: ans + i -= 1 + } + h -= 1 + } + ans + } + + /** Copies the elements in this `LongAccumulator` to a specified collection. 
+ * Note that the target collection is not specialized. + * Usage example: `acc.to[Vector]` + */ + final def to[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, Long, Coll[Long]]): Coll[Long] = { + if (totalSize > Int.MaxValue) throw new IllegalArgumentException("Too many elements accumulated for a Scala collection: "+totalSize.toString) + val b = cbf() + b.sizeHint(totalSize.toInt) + var h = 0 + var pv = 0L + while (h < hIndex) { + val x = history(h) + val n = cumulative(h) - pv + pv = cumulative(h) + var i = 0 + while (i < n) { + b += x(i) + i += 1 + } + h += 1 + } + var i = 0 + while (i < index) { + b += current(i) + i += 1 + } + b.result + } +} + +object LongAccumulator { + private val emptyLongArray = new Array[Long](0) + private val emptyLongArrayArray = new Array[Array[Long]](0) + + /** A `Supplier` of `LongAccumulator`s, suitable for use with `java.util.stream.LongStream`'s `collect` method. Suitable for `Stream[Long]` also. */ + def supplier = new java.util.function.Supplier[LongAccumulator]{ def get: LongAccumulator = new LongAccumulator } + + /** A `BiConsumer` that adds an element to an `Accumulator`, suitable for use with `java.util.stream.LongStream`'s `collect` method. */ + def adder = new java.util.function.ObjLongConsumer[LongAccumulator]{ def accept(ac: LongAccumulator, a: Long): Unit = { ac += a } } + + /** A `BiConsumer` that adds a boxed `Long` to an `LongAccumulator`, suitable for use with `java.util.stream.Stream`'s `collect` method. */ + def boxedAdder = new java.util.function.BiConsumer[LongAccumulator, Long]{ def accept(ac: LongAccumulator, a: Long): Unit = { ac += a } } + + /** A `BiConsumer` that merges `LongAccumulator`s, suitable for use with `java.util.stream.LongStream`'s `collect` method. Suitable for `Stream[Long]` also. 
*/ + def merger = new java.util.function.BiConsumer[LongAccumulator, LongAccumulator]{ def accept(a1: LongAccumulator, a2: LongAccumulator): Unit = { a1 drain a2 } } + + /** Builds a `LongAccumulator` from any `Long`-valued `TraversableOnce` */ + def from[A](source: TraversableOnce[Long]) = { + val a = new LongAccumulator + source.foreach(a += _) + a + } +} + +private[java8] class LongAccumulatorStepper(private val acc: LongAccumulator) extends LongStepper { + import java.util.Spliterator._ + + private var h = 0 + private var i = 0 + private var a = if (acc.hIndex > 0) acc.history(0) else acc.current + private var n = if (acc.hIndex > 0) acc.cumulative(0) else acc.index + private var N = acc.totalSize + + private def duplicateSelf(limit: Long): LongAccumulatorStepper = { + val ans = new LongAccumulatorStepper(acc) + ans.h = h + ans.i = i + ans.a = a + ans.n = n + ans.N = limit + ans + } + + private def loadMore(): Unit = { + h += 1 + if (h < acc.hIndex) { a = acc.history(h); n = acc.cumulative(h) - acc.cumulative(h-1) } + else { a = acc.current; n = acc.index } + i = 0 + } + + def characteristics() = ORDERED | SIZED | SUBSIZED | NONNULL + + def estimateSize = N + + def hasNext = N > 0 + + def nextLong: Long = + if (N <= 0) throw new NoSuchElementException("next on empty Stepper") + else { + if (i >= n) loadMore() + val ans = a(i) + i += 1 + N -= 1 + ans + } + + // Overridden for efficiency + override def tryStep(f: Long => Unit): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def tryAdvance(f: java.util.function.LongConsumer): Boolean = + if (N <= 0) false + else { + if (i >= n) loadMore() + f.accept(a(i)) + i += 1 + N -= 1 + true + } + + // Overridden for efficiency + override def foreach(f: Long => Unit): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f(a(i)) + i += 1 + } + N -= (n - i0) + } + }
+ + // Overridden for efficiency + override def forEachRemaining(f: java.util.function.LongConsumer): Unit = { + while (N > 0) { + if (i >= n) loadMore() + val i0 = i + if ((n-i) > N) n = i + N.toInt + while (i < n) { + f.accept(a(i)) + i += 1 + } + N -= (n - i0) + } + } + + def substep(): LongStepper = + if (N <= 1) null + else { + val half = (N >> 1) + val M = (if (h <= 0) 0L else acc.cumulative(h-1)) + i + val R = M + half + val ans = duplicateSelf(half) + if (h < acc.hIndex) { + val w = acc.seekSlot(R) + h = (w >>> 32).toInt + if (h < acc.hIndex) { + a = acc.history(h) + n = acc.cumulative(h) - (if (h > 0) acc.cumulative(h-1) else 0) + } + else { + a = acc.current + n = acc.index + } + i = (w & 0xFFFFFFFFL).toInt + } + else i += half.toInt + N -= half + ans + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/collectionImpl/Stepper.scala b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/Stepper.scala new file mode 100644 index 0000000..4bfe250 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/collectionImpl/Stepper.scala @@ -0,0 +1,641 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.collectionImpl + +import scala.language.higherKinds + +import java.util.Spliterator + +/** A Stepper is a specialized collection that can step through its + * contents once. It provides the same test-and-get methods as + * does `Iterator`, named `hasStep` and `nextStep` so they can + * coexist with iterator methods. 
However, like `Spliterator`, + * steppers provide a `tryStep` method to call a closure if another + * element exists, a `substep()` method to split into pieces, and + * `characteristics` and size-reporting methods that + * implement the subdivision and report what is known about the remaining + * size of the `Stepper`. `Stepper` thus naturally implements both + * `Iterator` and `Spliterator`. + * + * A `Stepper` can present itself as a Spliterator via the `spliterator` + * method, or as a Scala Iterator via the `iterator` method. The `Stepper` + * trait is compatible with both `Spliterator` and Java's generic and + * primitive iterators, so a `Stepper` may already be one or both. + * + * Subtraits `NextStepper` and `TryStepper` fill in the basic capability + * by either implementing `tryStep` in terms of `hasStep` and `nextStep`, + * or vice versa. + * + * Subtraits `AnyStepper`, `DoubleStepper`, `IntStepper`, and `LongStepper` + * implement both the `Stepper` trait and the corresponding Java + * `Spliterator` and `Iterator`/`PrimitiveIterator`. + * + * Example: + * {{{ + * val s = Stepper.of(Vector(1,2,3,4)) + * if (s.hasStep) println(s.nextStep) // Prints 1 + * println(s.tryStep(i => println(i*i))) // Prints 4, then true + * s.substep.foreach(println) // Prints 3 + * println(s.count(_ > 3)) // Prints 4 + * println(s.hasStep) // Prints `false` + * }}} + */ +trait Stepper[@specialized(Double, Int, Long) A] extends StepperLike[A, Stepper[A]] { + /** Drains the contents of this stepper into an `Accumulator` or specialized variant thereof as appropriate. + * This is a terminal operation. + * + * Note: accumulation will occur sequentially. To accumulate in parallel, use a `Stream` (i.e. `.parStream.accumulate`). 
+ */ + def accumulate[Acc <: AccumulatorLike[A, Acc]](implicit accer: scala.compat.java8.converterImpl.AccumulatesFromStepper[A, Acc]) = accer(this) +} + +/** An (optional) marker trait that indicates that a `Stepper` can call `substep` with + * at worst O(log N) time and space complexity, and that the division is likely to + * be reasonably even. + */ +trait EfficientSubstep {} + +/** Provides functionality for Stepper while keeping track of a more precise type of the collection. + */ +trait StepperLike[@specialized(Double, Int, Long) A, +CC] { self: CC => + /** Characteristics are bit flags that indicate runtime characteristics of this Stepper. + * + * - `Distinct` means that no duplicates exist + * - `Immutable` means that the underlying collection is guaranteed not to change during traversal + * - `NonNull` means that no nulls will be returned during traversal + * - `Sized` means that the collection knows its exact size + * - `SubSized` means that sub-Steppers created with `substep()` will also know their own size. `SubSized` steppers must also be `Sized`. + * + * The Java flags `CONCURRENT` and `SORTED` are not supported; modification of a concurrency-aware underlying collection is not + * guaranteed to be any safer than modification of any generic mutable collection, and if the underlying collection is ordered by + * virtue of sorting, `Stepper` will not keep track of that fact. + */ + def characteristics(): Int + + /** Returns the size of the collection, if known exactly, or `-1` if not. */ + def knownSize: Long + + /** `true` if there are more elements to step through, `false` if not. */ + def hasStep: Boolean + + /** The next element traversed by this Stepper. + * `nextStep()` throws an exception if no elements exist, so check `hasStep` immediately prior + * to calling. Note that `tryStep` also consumes an element, so the result of `hasStep` will + * be invalid after `tryStep` is called. 
+ */ + def nextStep(): A + + /** If another element exists, apply `f` to it and return `true`; otherwise, return `false`. */ + def tryStep(f: A => Unit): Boolean + + /** Attempt to split this `Stepper` in half, with the new (returned) copy taking the first half + * of the collection, and this one advancing to cover the second half. If subdivision is not + * possible or not advisable, `substep()` will return `null`. + */ + def substep(): CC + + /** Warns this `Stepper` that it is likely to be used in a parallel context (used for efficiency only) */ + def anticipateParallelism: this.type = this + + + //// + // Terminal operations (do not produce another Stepper) + //// + + /** Consumes all remaining elements in this `Stepper` and counts how many there are. + * This is a terminal operation, though if `knownSize` is non-negative, it won't actually + * iterate over the elements. + */ + def count(): Long = knownSize match { + case x if x < 0 => var n = 0L; while (hasStep) { nextStep; n += 1 }; n + case x => x + } + + /** Consumes all remaining elements in this `Stepper` and counts how many satisfy condition `p`. + * This is a terminal operation. + */ + def count(p: A => Boolean): Long = { var n = 0L; while (hasStep) { if (p(nextStep)) n += 1 }; n } + + /** Searches for an element that satisfies condition `p`. If none are found, it returns `false`. + * This is a terminal operation. + */ + def exists(p: A => Boolean): Boolean = { while(hasStep) { if (p(nextStep)) return true }; false } + + /** Searches for an element that satisifes condition `p`, returning it wrapped in `Some` if one is found, or `None` otherwise. + * This is a terminal operation. + */ + def find(p: A => Boolean): Option[A] = { while (hasStep) { val a = nextStep; if (p(a)) return Some(a) }; None } + + /** Repeatedly applies `op` to propagate an initial value `zero` through all elements of the collection. + * Traversal order is left-to-right. + * This is a terminal operation. 
+ */ + def fold[@specialized(Double, Int, Long) B](zero: B)(op: (B, A) => B) = { var b = zero; while (hasStep) { b = op(b, nextStep) }; b } + + /** Repeatedly applies `op` to propagate an initial value `zero` through the collection until a condition `p` is met. + * If `p` is never met, the result of the last operation is returned. + * This is a terminal operation. + */ + def foldTo[@specialized(Double, Int, Long) B](zero: B)(op: (B, A) => B)(p: B => Boolean) = { var b = zero; while (!p(b) && hasStep) { b = op(b, nextStep) }; b } + + /** Applies `f` to every remaining element in the collection. + * This is a terminal operation. + */ + def foreach(f: A => Unit): Unit = { while (hasStep) f(nextStep) } + + /** Repeatedly merges elements with `op` until only a single element remains. + * Throws an exception if the `Stepper` is empty. + * Merging occurs from left to right. + * This is a terminal operation. + */ + def reduce(op: (A, A) => A): A = { var a = nextStep; while (hasStep) { a = op(a, nextStep) }; a } + + + //// + // Operations that convert to another related type + //// + + /** Returns this `Stepper` as a `java.util.Spliterator`. + * This is a terminal operation. + */ + def spliterator: Spliterator[A] + + /** Returns this `Stepper` as a Scala `Iterator`. + * This is a terminal operation. + */ + def iterator: Iterator[A] = new scala.collection.AbstractIterator[A] { + def hasNext = self.hasStep + def next = self.nextStep + } + + /** Returns a Scala collection of the type requested. */ + def to[Coll[_]](implicit cbf: collection.generic.CanBuildFrom[Nothing, A, Coll[A]]): Coll[A] = { + val b = cbf() + while (hasStep) b += nextStep + b.result() + } +} + + +/** This trait indicates that a `Stepper` will implement `tryStep` in terms of `hasNext` and `nextStep`. 
*/ +trait NextStepper[@specialized(Double, Int, Long) A] extends Stepper[A] with StepperLike[A, NextStepper[A]] { + def tryStep(f: A => Unit) = if (hasStep) { f(nextStep()); true } else false + def spliterator: Spliterator[A] = new ProxySpliteratorViaNext[A](this) +} +private[collectionImpl] class ProxySpliteratorViaNext[A](underlying: NextStepper[A]) extends Spliterator[A] { + def characteristics() = underlying.characteristics + def estimateSize() = underlying.knownSize + def tryAdvance(f: java.util.function.Consumer[_ >: A]): Boolean = if (underlying.hasStep) { f.accept(underlying.nextStep()); true } else false + def trySplit() = underlying.substep() match { case null => null; case x => new ProxySpliteratorViaNext[A](x) } +} + +/** This trait indicates that a `Stepper` will implement `hasNext` and `nextStep` by caching applications of `tryStep`. + * Subclasses must implement `tryUncached` instead of `tryStep`, and should leave it protected, and must implement + * `knownUncachedSize` instead of `knownSize`. For speed, `foreachUncached` may also be overridden. It is recommended + * that all of the `Uncached` methods be left protected. 
+ */ +trait TryStepper[@specialized(Double, Int, Long) A] extends Stepper[A] with StepperLike[A, TryStepper[A]] { + protected def myCache: A + protected def myCache_=(a: A): Unit + protected final var myCacheIsFull = false + private def load(): Boolean = { + myCacheIsFull = tryStep(myCache = _) + myCacheIsFull + } + final def hasStep = myCacheIsFull || load() + final def nextStep = { + if (!myCacheIsFull) { + load() + if (!myCacheIsFull) Stepper.throwNSEE + } + val ans = myCache + myCacheIsFull = false + myCache = null.asInstanceOf[A] + ans + } + final def knownSize = knownUncachedSize + (if (myCacheIsFull) 1 else 0) + protected def knownUncachedSize: Long + final def tryStep(f: A => Unit): Boolean = if (myCacheIsFull) { f(myCache); myCacheIsFull = false; true } else tryUncached(f) + protected def tryUncached(f: A => Unit): Boolean + final override def foreach(f: A => Unit): Unit = { if (myCacheIsFull) { f(myCache); myCacheIsFull = false }; foreachUncached(f) } + protected def foreachUncached(f: A => Unit): Unit = { while (tryUncached(f)) {} } + def spliterator: Spliterator[A] = new ProxySpliteratorViaTry[A](this) +} +private[collectionImpl] class ProxySpliteratorViaTry[A](underlying: TryStepper[A]) extends Spliterator[A] { + def characteristics() = underlying.characteristics + def estimateSize() = underlying.knownSize + def tryAdvance(f: java.util.function.Consumer[_ >: A]): Boolean = underlying.tryStep(a => f.accept(a)) + override def forEachRemaining(f: java.util.function.Consumer[_ >: A]): Unit = { underlying.foreach(a => f.accept(a)) } + def trySplit() = underlying.substep() match { case null => null; case x => new ProxySpliteratorViaTry[A](x) } +} + +/** Any `AnyStepper` combines the functionality of a Java `Iterator`, a Java `Spliterator`, and a `Stepper`. 
*/ +trait AnyStepper[A] extends Stepper[A] with java.util.Iterator[A] with Spliterator[A] with StepperLike[A, AnyStepper[A]] { + override def forEachRemaining(c: java.util.function.Consumer[_ >: A]): Unit = { while (hasNext) { c.accept(next) } } + def hasStep = hasNext() + def knownSize = getExactSizeIfKnown + def nextStep = next + def tryAdvance(c: java.util.function.Consumer[_ >: A]): Boolean = if (hasNext) { c.accept(next); true } else false + def tryStep(f: A => Unit): Boolean = if (hasNext) { f(next); true } else false + def trySplit() = substep + override def spliterator: Spliterator[A] = this + def seqStream: java.util.stream.Stream[A] = java.util.stream.StreamSupport.stream(this, false) + def parStream: java.util.stream.Stream[A] = java.util.stream.StreamSupport.stream(this, true) +} + +private[collectionImpl] object AnyStepper { + final class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { + def hasNext(): Boolean = st.hasNext() + def next(): Double = st.next() + def characteristics(): Int = st.characteristics() + def estimateSize(): Long = st.estimateSize() + def substep(): AnyStepper[Double] = new BoxedDoubleStepper(st.substep()) + } + + final class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { + def hasNext(): Boolean = st.hasNext() + def next(): Int = st.next() + def characteristics(): Int = st.characteristics() + def estimateSize(): Long = st.estimateSize() + def substep(): AnyStepper[Int] = new BoxedIntStepper(st.substep()) + } + + final class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { + def hasNext(): Boolean = st.hasNext() + def next(): Long = st.next() + def characteristics(): Int = st.characteristics() + def estimateSize(): Long = st.estimateSize() + def substep(): AnyStepper[Long] = new BoxedLongStepper(st.substep()) + } +} + +/** A `DoubleStepper` combines the functionality of a Java `PrimitiveIterator`, a Java `Spliterator`, and a `Stepper`, all specialized for `Double` values. 
*/ +trait DoubleStepper extends Stepper[Double] with java.util.PrimitiveIterator.OfDouble with Spliterator.OfDouble with StepperLike[Double, DoubleStepper] { + override def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Double]): Unit = { while (hasNext) { c.accept(java.lang.Double.valueOf(nextDouble)) } } + override def forEachRemaining(c: java.util.function.DoubleConsumer): Unit = { while (hasNext) { c.accept(nextDouble) } } + def hasStep = hasNext() + def knownSize = getExactSizeIfKnown + def nextStep = nextDouble + override def tryAdvance(c: java.util.function.Consumer[_ >: java.lang.Double]): Boolean = if (hasNext) { c.accept(java.lang.Double.valueOf(nextDouble)); true } else false + def tryAdvance(c: java.util.function.DoubleConsumer): Boolean = if (hasNext) { c.accept(nextDouble); true } else false + def tryStep(f: Double => Unit): Boolean = if (hasNext) { f(nextDouble); true } else false + def trySplit() = substep + override def spliterator: Spliterator[Double] = this.asInstanceOf[Spliterator[Double]] // Scala and Java disagree about whether it's java.lang.Double or double + def seqStream: java.util.stream.DoubleStream = java.util.stream.StreamSupport.doubleStream(this, false) + def parStream: java.util.stream.DoubleStream = java.util.stream.StreamSupport.doubleStream(this, true) +} + +/** An `IntStepper` combines the functionality of a Java `PrimitiveIterator`, a Java `Spliterator`, and a `Stepper`, all specialized for `Int` values. 
*/ +trait IntStepper extends Stepper[Int] with java.util.PrimitiveIterator.OfInt with Spliterator.OfInt with StepperLike[Int, IntStepper] { + override def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Integer]): Unit = { while (hasNext) { c.accept(java.lang.Integer.valueOf(nextInt)) } } + override def forEachRemaining(c: java.util.function.IntConsumer): Unit = { while (hasNext) { c.accept(nextInt) } } + def hasStep = hasNext() + def knownSize = getExactSizeIfKnown + def nextStep = nextInt + override def tryAdvance(c: java.util.function.Consumer[_ >: java.lang.Integer]): Boolean = if (hasNext) { c.accept(java.lang.Integer.valueOf(nextInt)); true } else false + def tryAdvance(c: java.util.function.IntConsumer): Boolean = if (hasNext) { c.accept(nextInt); true } else false + def tryStep(f: Int => Unit): Boolean = if (hasNext) { f(nextInt); true } else false + def trySplit() = substep + override def spliterator: Spliterator[Int] = this.asInstanceOf[Spliterator[Int]] // Scala and Java disagree about whether it's java.lang.Integer or int + def seqStream: java.util.stream.IntStream = java.util.stream.StreamSupport.intStream(this, false) + def parStream: java.util.stream.IntStream = java.util.stream.StreamSupport.intStream(this, true) +} + +/** A `LongStepper` combines the functionality of a Java `PrimitiveIterator`, a Java `Spliterator`, and a `Stepper`, all specialized for `Long` values. 
*/ +trait LongStepper extends Stepper[Long] with java.util.PrimitiveIterator.OfLong with Spliterator.OfLong with StepperLike[Long, LongStepper] { + override def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Long]): Unit = { while (hasNext) { c.accept(java.lang.Long.valueOf(nextLong)) } } + override def forEachRemaining(c: java.util.function.LongConsumer): Unit = { while (hasNext) { c.accept(nextLong) } } + def hasStep = hasNext() + def knownSize = getExactSizeIfKnown + def nextStep = nextLong + override def tryAdvance(c: java.util.function.Consumer[_ >: java.lang.Long]): Boolean = if (hasNext) { c.accept(java.lang.Long.valueOf(nextLong)); true } else false + def tryAdvance(c: java.util.function.LongConsumer): Boolean = if (hasNext) { c.accept(nextLong); true } else false + def tryStep(f: Long => Unit): Boolean = if (hasNext) { f(nextLong); true } else false + def trySplit() = substep + override def spliterator: Spliterator[Long] = this.asInstanceOf[Spliterator[Long]] // Scala and Java disagree about whether it's java.lang.Long or long + def seqStream: java.util.stream.LongStream = java.util.stream.StreamSupport.longStream(this, false) + def parStream: java.util.stream.LongStream = java.util.stream.StreamSupport.longStream(this, true) +} + + +object Stepper { + /** Indicates that a Stepper delivers distinct values (e.g. is backed by a `Set`) */ + val Distinct = Spliterator.DISTINCT + + /** Indicates that a Stepper runs over an immutable collection */ + val Immutable = Spliterator.IMMUTABLE + + /** Indicates that a Stepper will not return any `null` values */ + val NonNull = Spliterator.NONNULL + + /** Indicates that a Stepper delivers elements in a particular order that should be maintained */ + val Ordered = Spliterator.ORDERED + + /** Indicates that a Stepper knows exactly how many elements it contains */ + val Sized = Spliterator.SIZED + + /** Indicates that a Stepper's children (created with substep()) will all know their size. 
Steppers that are SubSized must also be Sized. */ + val SubSized = Spliterator.SUBSIZED + + private[java8] final def throwNSEE: Nothing = throw new NoSuchElementException("Empty Stepper") + + + private class OfSpliterator[A](sp: Spliterator[A]) + extends AnyStepper[A] with java.util.function.Consumer[A] { + private var cache: A = null.asInstanceOf[A] + private var cached: Boolean = false + def accept(a: A): Unit = { cache = a; cached = true } + + private def loadCache: Boolean = sp.tryAdvance(this) + private def useCache(c: java.util.function.Consumer[_ >: A]): Boolean = { + if (cached) { + c.accept(cache) + cache = null.asInstanceOf[A] + cached = false + true + } + else false + } + + def characteristics = sp.characteristics + def estimateSize = { + val sz = sp.estimateSize + if (cached && sz < Long.MaxValue && sz >= 0) sz + 1 + else sz + } + override def forEachRemaining(c: java.util.function.Consumer[_ >: A]): Unit = { + useCache(c) + sp.forEachRemaining(c) + } + def hasNext = cached || loadCache + def next = { + if (!hasNext) throwNSEE + val ans = cache + cache = null.asInstanceOf[A] + cached = false + ans + } + def substep(): AnyStepper[A] = { + val subSp = sp.trySplit() + if (subSp eq null) null + else { + val sub = new OfSpliterator(subSp) + if (cached) { + sub.cache = cache + sub.cached = true + cache = null.asInstanceOf[A] + cached = false + } + sub + } + } + override def tryAdvance(c: java.util.function.Consumer[_ >: A]) = useCache(c) || sp.tryAdvance(c) + } + + private class OfDoubleSpliterator(sp: Spliterator.OfDouble) + extends DoubleStepper with java.util.function.DoubleConsumer { + private var cache: Double = Double.NaN + private var cached: Boolean = false + def accept(d: Double): Unit = { cache = d; cached = true } + + private def loadCache: Boolean = sp.tryAdvance(this) + private def useCache(c: java.util.function.DoubleConsumer): Boolean = { + if (cached) { + c.accept(cache) + cached = false + true + } + else false + } + + def characteristics = 
sp.characteristics + def estimateSize = { + val sz = sp.estimateSize + if (cached && sz < Long.MaxValue && sz >= 0) sz + 1 + else sz + } + override def forEachRemaining(c: java.util.function.DoubleConsumer): Unit = { + useCache(c) + sp.forEachRemaining(c) + } + def hasNext = cached || loadCache + def nextDouble = { + if (!hasNext) throwNSEE + val ans = cache + cached = false + ans + } + def substep(): DoubleStepper = { + val subSp = sp.trySplit() + if (subSp eq null) null + else { + val sub = new OfDoubleSpliterator(subSp) + if (cached) { + sub.cache = cache + sub.cached = true + cached = false + } + sub + } + } + override def tryAdvance(c: java.util.function.DoubleConsumer) = useCache(c) || sp.tryAdvance(c) + } + + private class OfIntSpliterator(sp: Spliterator.OfInt) + extends IntStepper with java.util.function.IntConsumer { + private var cache: Int = 0 + private var cached: Boolean = false + def accept(i: Int): Unit = { cache = i; cached = true } + + private def loadCache: Boolean = sp.tryAdvance(this) + private def useCache(c: java.util.function.IntConsumer): Boolean = { + if (cached) { + c.accept(cache) + cached = false + true + } + else false + } + + def characteristics = sp.characteristics + def estimateSize = { + val sz = sp.estimateSize + if (cached && sz < Long.MaxValue && sz >= 0) sz + 1 + else sz + } + override def forEachRemaining(c: java.util.function.IntConsumer): Unit = { + useCache(c) + sp.forEachRemaining(c) + } + def hasNext = cached || loadCache + def nextInt = { + if (!hasNext) throwNSEE + val ans = cache + cached = false + ans + } + def substep(): IntStepper = { + val subSp = sp.trySplit() + if (subSp eq null) null + else { + val sub = new OfIntSpliterator(subSp) + if (cached) { + sub.cache = cache + sub.cached = true + cached = false + } + sub + } + } + override def tryAdvance(c: java.util.function.IntConsumer) = useCache(c) || sp.tryAdvance(c) + } + + private class OfLongSpliterator(sp: Spliterator.OfLong) + extends LongStepper with 
java.util.function.LongConsumer { + private var cache: Long = 0L + private var cached: Boolean = false + def accept(l: Long): Unit = { cache = l; cached = true } + + private def loadCache: Boolean = sp.tryAdvance(this) + private def useCache(c: java.util.function.LongConsumer): Boolean = { + if (cached) { + c.accept(cache) + cached = false + true + } + else false + } + + def characteristics = sp.characteristics + def estimateSize = { + val sz = sp.estimateSize + if (cached && sz < Long.MaxValue && sz >= 0) sz + 1 + else sz + } + override def forEachRemaining(c: java.util.function.LongConsumer): Unit = { + useCache(c) + sp.forEachRemaining(c) + } + def hasNext = cached || loadCache + def nextLong = { + if (!hasNext) throwNSEE + val ans = cache + cached = false + ans + } + def substep(): LongStepper = { + val subSp = sp.trySplit() + if (subSp eq null) null + else { + val sub = new OfLongSpliterator(subSp) + if (cached) { + sub.cache = cache + sub.cached = true + cached = false + } + sub + } + } + override def tryAdvance(c: java.util.function.LongConsumer) = useCache(c) || sp.tryAdvance(c) + } + + /** Creates a `Stepper` over a generic `Spliterator`. */ + def ofSpliterator[A](sp: Spliterator[A]): AnyStepper[A] = sp match { + case as: AnyStepper[A] => as + case s: DoubleStepper => new AnyStepper.BoxedDoubleStepper(s).asInstanceOf[AnyStepper[A]] + case s: IntStepper => new AnyStepper.BoxedIntStepper(s).asInstanceOf[AnyStepper[A]] + case s: LongStepper => new AnyStepper.BoxedLongStepper(s).asInstanceOf[AnyStepper[A]] + case _ => new OfSpliterator[A](sp) + } + + + /** Creates a `Stepper` over a `DoubleSpliterator`. */ + def ofSpliterator(sp: Spliterator.OfDouble): DoubleStepper = sp match { + case ds: DoubleStepper => ds + case _ => new OfDoubleSpliterator(sp) + } + + /** Creates a `Stepper` over an `IntSpliterator`. 
*/ + def ofSpliterator(sp: Spliterator.OfInt): IntStepper = sp match { + case is: IntStepper => is + case _ => new OfIntSpliterator(sp) + } + + + /** Creates a `Stepper` over a `LongSpliterator`. */ + def ofSpliterator(sp: Spliterator.OfLong): LongStepper = sp match { + case ls: LongStepper => ls + case _ => new OfLongSpliterator(sp) + } + + /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. + * This provides a basis for more efficient stream processing on unboxed values provided that the original source + * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided + * (see for example StepsIntArray and StepsWidenedByteArray). */ + + private[java8] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { + def hasNext(): Boolean = st.hasNext() + def nextDouble(): Double = st.next() + def characteristics(): Int = st.characteristics() + def estimateSize(): Long = st.estimateSize() + def substep(): DoubleStepper = new UnboxingDoubleStepper(st.substep()) + } + + private[java8] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { + def hasNext(): Boolean = st.hasNext() + def nextInt(): Int = st.next() + def characteristics(): Int = st.characteristics() + def estimateSize(): Long = st.estimateSize() + def substep(): IntStepper = new UnboxingIntStepper(st.substep()) + } + + private[java8] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { + def hasNext(): Boolean = st.hasNext() + def nextLong(): Long = st.next() + def characteristics(): Int = st.characteristics() + def estimateSize(): Long = st.estimateSize() + def substep(): LongStepper = new UnboxingLongStepper(st.substep()) + } + + private[java8] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { + def hasNext(): Boolean = st.hasNext() + def nextInt(): Int = st.next() + def characteristics(): Int = st.characteristics() | NonNull + def 
estimateSize(): Long = st.estimateSize() + def substep(): IntStepper = new UnboxingByteStepper(st.substep()) + } + + private[java8] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { + def hasNext(): Boolean = st.hasNext() + def nextInt(): Int = st.next() + def characteristics(): Int = st.characteristics() | NonNull + def estimateSize(): Long = st.estimateSize() + def substep(): IntStepper = new UnboxingCharStepper(st.substep()) + } + + private[java8] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { + def hasNext(): Boolean = st.hasNext() + def nextInt(): Int = st.next() + def characteristics(): Int = st.characteristics() | NonNull + def estimateSize(): Long = st.estimateSize() + def substep(): IntStepper = new UnboxingShortStepper(st.substep()) + } + + private[java8] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { + def hasNext(): Boolean = st.hasNext() + def nextDouble(): Double = st.next() + def characteristics(): Int = st.characteristics() | NonNull + def estimateSize(): Long = st.estimateSize() + def substep(): DoubleStepper = new UnboxingFloatStepper(st.substep()) + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/Accumulates.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/Accumulates.scala new file mode 100644 index 0000000..16e50bc --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/Accumulates.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ + +trait AccumulatesFromStepper[@specialized(Double, Int, Long) A, Acc <: AccumulatorLike[A, Acc]] { + def apply(stepper: Stepper[A]): Acc +} + +final class CollectionCanAccumulate[A](private val underlying: TraversableOnce[A]) extends AnyVal { + def accumulate: Accumulator[A] = { + val a = new Accumulator[A] + underlying.foreach(a += _) + a + } +} + +final class AccumulateDoubleCollection(private val underlying: TraversableOnce[Double]) extends AnyVal { + def accumulate: DoubleAccumulator = { + val da = new DoubleAccumulator + underlying.foreach(da += _) + da + } +} + +final class AccumulateIntCollection(private val underlying: TraversableOnce[Int]) extends AnyVal { + def accumulate: IntAccumulator = { + val da = new IntAccumulator + underlying.foreach(da += _) + da + } +} + +final class AccumulateLongCollection(private val underlying: TraversableOnce[Long]) extends AnyVal { + def accumulate: LongAccumulator = { + val da = new LongAccumulator + underlying.foreach(da += _) + da + } +} + +final class AccumulateAnyArray[A](private val underlying: Array[A]) extends AnyVal { + def accumulate: Accumulator[A] = { + val a = new Accumulator[A] + var i = 0 + while (i < underlying.length) { a += underlying(i); i += 1 } + a + } +} + +final class AccumulateDoubleArray(private val underlying: Array[Double]) extends AnyVal { + def accumulate: DoubleAccumulator = { + val da = new DoubleAccumulator + var i = 0 + while (i < underlying.length) { da += underlying(i); i += 1 } + da + } +} + +final class AccumulateIntArray(private val underlying: Array[Int]) extends AnyVal { + def accumulate: IntAccumulator = { + val da = new IntAccumulator + var i = 0 + while (i < underlying.length) { da += underlying(i); i += 1 } + da + } +} + +final class AccumulateLongArray(private val underlying: Array[Long]) extends AnyVal { + def accumulate: LongAccumulator = { + val da = new LongAccumulator + var i = 
0 + while (i < underlying.length) { da += underlying(i); i += 1 } + da + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/AccumulatorConverters.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/AccumulatorConverters.scala new file mode 100644 index 0000000..e553443 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/AccumulatorConverters.scala @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import language.implicitConversions + +import scala.compat.java8.collectionImpl._ + +trait Priority3AccumulatorConverters { + implicit def collectionCanAccumulate[A](underlying: TraversableOnce[A]) = new CollectionCanAccumulate[A](underlying) +} + +trait Priority2AccumulatorConverters extends Priority3AccumulatorConverters { + implicit def accumulateDoubleCollection(underlying: TraversableOnce[Double]) = new AccumulateDoubleCollection(underlying) + implicit def accumulateIntCollection(underlying: TraversableOnce[Int]) = new AccumulateIntCollection(underlying) + implicit def accumulateLongCollection(underlying: TraversableOnce[Long]) = new AccumulateLongCollection(underlying) + implicit def accumulateAnyArray[A](underlying: Array[A]) = new AccumulateAnyArray(underlying) +} + +trait Priority1AccumulatorConverters extends Priority2AccumulatorConverters { + implicit def accumulateDoubleArray(underlying: Array[Double]) = new AccumulateDoubleArray(underlying) + implicit def accumulateIntArray(underlying: Array[Int]) = new AccumulateIntArray(underlying) + implicit def accumulateLongArray(underlying: Array[Long]) = new AccumulateLongArray(underlying) + + implicit def accumulateAnyStepper[A]: 
AccumulatesFromStepper[A, Accumulator[A]] = + PrivateAccumulatorConverters.genericAccumulateAnyStepper.asInstanceOf[AccumulatesFromStepper[A, Accumulator[A]]] +} + +private[java8] object PrivateAccumulatorConverters { + val genericAccumulateAnyStepper: AccumulatesFromStepper[Any, Accumulator[Any]] = new AccumulatesFromStepper[Any, Accumulator[Any]] { + def apply(stepper: Stepper[Any]) = { + val a = new Accumulator[Any] + while (stepper.hasStep) a += stepper.nextStep + a + } + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/MakesSteppers.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/MakesSteppers.scala new file mode 100644 index 0000000..f2ec563 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/MakesSteppers.scala @@ -0,0 +1,102 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ + +trait MakesStepper[T, +Extra] extends Any { + /** Generates a fresh stepper of type `S` for element type `T` */ + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]): S with Extra +} + +trait MakesKeyValueStepper[K, V, +Extra] extends Any { + /** Generates a fresh stepper of type `S` over map keys of type `K` */ + def keyStepper[S <: Stepper[_]](implicit ss: StepperShape[K, S]): S with Extra + + /** Generates a fresh stepper of type `S` over map values of type `V` */ + def valueStepper[S <: Stepper[_]](implicit ss: StepperShape[V, S]): S with Extra +} + +/** Encodes the translation from an element type `T` to the corresponding Stepper type `S` */ +sealed trait StepperShape[T, S <: Stepper[_]] { + /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ + def shape: Int + + /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. */ + def seqUnbox(st: AnyStepper[T]): S + + /** Create an unboxing primitive parallel (i.e. `with EfficientSubstep`) Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. 
*/ + def parUnbox(st: AnyStepper[T] with EfficientSubstep): S with EfficientSubstep +} +object StepperShape extends StepperShapeLowPriority { + // reference + final val Reference = 0 + + // primitive + final val IntValue = 1 + final val LongValue = 2 + final val DoubleValue = 3 + + // widening + final val ByteValue = 4 + final val ShortValue = 5 + final val CharValue = 6 + final val FloatValue = 7 + + implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { + def shape = IntValue + def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) + def parUnbox(st: AnyStepper[Int] with EfficientSubstep): IntStepper with EfficientSubstep = new Stepper.UnboxingIntStepper(st) with EfficientSubstep + } + implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { + def shape = LongValue + def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) + def parUnbox(st: AnyStepper[Long] with EfficientSubstep): LongStepper with EfficientSubstep = new Stepper.UnboxingLongStepper(st) with EfficientSubstep + } + implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { + def shape = DoubleValue + def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) + def parUnbox(st: AnyStepper[Double] with EfficientSubstep): DoubleStepper with EfficientSubstep = new Stepper.UnboxingDoubleStepper(st) with EfficientSubstep + } + implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { + def shape = ByteValue + def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) + def parUnbox(st: AnyStepper[Byte] with EfficientSubstep): IntStepper with EfficientSubstep = new Stepper.UnboxingByteStepper(st) with EfficientSubstep + } + implicit val shortStepperShape: StepperShape[Short, IntStepper] = new 
StepperShape[Short, IntStepper] { + def shape = ShortValue + def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) + def parUnbox(st: AnyStepper[Short] with EfficientSubstep): IntStepper with EfficientSubstep = new Stepper.UnboxingShortStepper(st) with EfficientSubstep + } + implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { + def shape = CharValue + def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) + def parUnbox(st: AnyStepper[Char] with EfficientSubstep): IntStepper with EfficientSubstep = new Stepper.UnboxingCharStepper(st) with EfficientSubstep + } + implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { + def shape = FloatValue + def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) + def parUnbox(st: AnyStepper[Float] with EfficientSubstep): DoubleStepper with EfficientSubstep = new Stepper.UnboxingFloatStepper(st) with EfficientSubstep + } +} +trait StepperShapeLowPriority { + implicit def anyStepperShape[T] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] + + private[this] val anyStepperShapePrototype: StepperShape[AnyRef, AnyStepper[AnyRef]] = new StepperShape[AnyRef, AnyStepper[AnyRef]] { + def shape = StepperShape.Reference + def seqUnbox(st: AnyStepper[AnyRef]): AnyStepper[AnyRef] = st + def parUnbox(st: AnyStepper[AnyRef] with EfficientSubstep): AnyStepper[AnyRef] with EfficientSubstep = st + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepConverters.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepConverters.scala new file mode 100644 index 0000000..7bab448 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepConverters.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import language.implicitConversions + +trait Priority3StepConverters { + implicit def richIterableCanStep[A](underlying: Iterable[A]) = new RichIterableCanStep(underlying) + implicit def richMapCanStep[K, V](underlying: collection.Map[K, V]) = new RichMapCanStep[K, V](underlying) +} + +trait Priority2StepConverters extends Priority3StepConverters { + implicit def richLinearSeqCanStep[A](underlying: collection.LinearSeq[A]) = new RichLinearSeqCanStep[A](underlying) + implicit def richIndexedSeqCanStep[A](underlying: collection.IndexedSeqLike[A, _]) = new RichIndexedSeqCanStep[A](underlying) +} + +trait Priority1StepConverters extends Priority2StepConverters { + implicit def richDefaultHashTableCanStep[K, V](underlying: collection.mutable.HashTable[K, collection.mutable.DefaultEntry[K, V]]) = new RichDefaultHashTableCanStep[K, V](underlying) + implicit def richLinkedHashTableCanStep[K, V](underlying: collection.mutable.HashTable[K, collection.mutable.LinkedEntry[K, V]]) = new RichLinkedHashTableCanStep[K, V](underlying) + implicit def richArrayCanStep[A](underlying: Array[A]) = new RichArrayCanStep[A](underlying) + implicit def richWrappedArrayCanStep[A](underlying: collection.mutable.WrappedArray[A]) = new RichArrayCanStep[A](underlying.array) + implicit def richFlatHashTableCanStep[A](underlying: collection.mutable.FlatHashTable[A]) = new RichFlatHashTableCanStep[A](underlying) + implicit def richIteratorCanStep[A](underlying: Iterator[A]) = new RichIteratorCanStep(underlying) + implicit def richImmHashMapCanStep[K, V](underlying: collection.immutable.HashMap[K, V]) = new RichImmHashMapCanStep[K, V](underlying) + implicit def richImmHashSetCanStep[A](underlying: collection.immutable.HashSet[A]) = new 
RichImmHashSetCanStep[A](underlying) + implicit def richNumericRangeCanStep[T](underlying: collection.immutable.NumericRange[T]) = new RichNumericRangeCanStep(underlying) + implicit def richVectorCanStep[A](underlying: Vector[A]) = new RichVectorCanStep[A](underlying) + implicit def richBitSetCanStep(underlying: collection.BitSet) = new RichBitSetCanStep(underlying) + implicit def richRangeCanStep(underlying: Range) = new RichRangeCanStep(underlying) + implicit def richStringCanStep(underlying: String) = new RichStringCanStep(underlying) +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsArray.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsArray.scala new file mode 100644 index 0000000..6c50bad --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsArray.scala @@ -0,0 +1,97 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsObjectArray[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) +extends StepsLikeIndexed[A, StepsObjectArray[A]](_i0, _iN) { + def next() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsObjectArray[A](underlying, i0, half) +} + +private[java8] class StepsBoxedBooleanArray(underlying: Array[Boolean], _i0: Int, _iN: Int) +extends StepsLikeIndexed[Boolean, StepsBoxedBooleanArray](_i0, _iN) { + def next() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsBoxedBooleanArray(underlying, i0, half) +} + +private[java8] class StepsWidenedByteArray(underlying: Array[Byte], _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsWidenedByteArray](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsWidenedByteArray(underlying, i0, half) +} + +private[java8] class StepsWidenedCharArray(underlying: Array[Char], _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsWidenedCharArray](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsWidenedCharArray(underlying, i0, half) +} + +private[java8] class StepsWidenedShortArray(underlying: Array[Short], _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsWidenedShortArray](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsWidenedShortArray(underlying, i0, half) +} + +private[java8] class StepsWidenedFloatArray(underlying: Array[Float], _i0: Int, _iN: Int) +extends 
StepsDoubleLikeIndexed[StepsWidenedFloatArray](_i0, _iN) { + def nextDouble() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsWidenedFloatArray(underlying, i0, half) +} + +private[java8] class StepsDoubleArray(underlying: Array[Double], _i0: Int, _iN: Int) +extends StepsDoubleLikeIndexed[StepsDoubleArray](_i0, _iN) { + def nextDouble() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsDoubleArray(underlying, i0, half) +} + +private[java8] class StepsIntArray(underlying: Array[Int], _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsIntArray](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsIntArray(underlying, i0, half) +} + +private[java8] class StepsLongArray(underlying: Array[Long], _i0: Int, _iN: Int) +extends StepsLongLikeIndexed[StepsLongArray](_i0, _iN) { + def nextLong() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsLongArray(underlying, i0, half) +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichArrayCanStep[T](private val underlying: Array[T]) extends AnyVal with MakesStepper[T, EfficientSubstep] { + override def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.Reference => + if(underlying.isInstanceOf[Array[Boolean]]) + new StepsBoxedBooleanArray (underlying.asInstanceOf[Array[Boolean]], 0, underlying.length) + else new StepsObjectArray[AnyRef](underlying.asInstanceOf[Array[AnyRef ]], 0, underlying.length) + case StepperShape.IntValue => new StepsIntArray (underlying.asInstanceOf[Array[Int ]], 0, underlying.length) + case StepperShape.LongValue => new StepsLongArray (underlying.asInstanceOf[Array[Long ]], 0, underlying.length) + case StepperShape.DoubleValue => new 
StepsDoubleArray (underlying.asInstanceOf[Array[Double ]], 0, underlying.length) + case StepperShape.ByteValue => new StepsWidenedByteArray (underlying.asInstanceOf[Array[Byte ]], 0, underlying.length) + case StepperShape.ShortValue => new StepsWidenedShortArray (underlying.asInstanceOf[Array[Short ]], 0, underlying.length) + case StepperShape.CharValue => new StepsWidenedCharArray (underlying.asInstanceOf[Array[Char ]], 0, underlying.length) + case StepperShape.FloatValue => new StepsWidenedFloatArray (underlying.asInstanceOf[Array[Float ]], 0, underlying.length) + }).asInstanceOf[S with EfficientSubstep] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsBitSet.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsBitSet.scala new file mode 100644 index 0000000..7c3126b --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsBitSet.scala @@ -0,0 +1,81 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import scala.compat.java8.runtime._ + +import Stepper._ + +//////////////////////////// +// Stepper implementation // +//////////////////////////// + +private[java8] class StepsIntBitSet(_underlying: Array[Long], _i0: Int, _iN: Int) +extends StepsIntLikeSliced[Array[Long], StepsIntBitSet](_underlying, _i0, _iN) { + private var mask: Long = (-1L) << (i & 0x3F) + private var cache: Long = underlying(i >>> 6) + private var found: Boolean = false + def semiclone(half: Int) = { + val ans = new StepsIntBitSet(underlying, i, half) + i = half + mask = (-1L) << (i & 0x3F) + cache = underlying(i >>> 6) + found = false + ans + } + def hasNext(): Boolean = found || ((i < iN) && { + while ((mask & cache) == 0) { + i += java.lang.Long.numberOfLeadingZeros(~mask) + if (i < 0 || i >= iN) { i = iN; return false } + mask = -1L + cache = underlying(i >>> 6) + } + var m = mask << 1 + while ((mask & cache) == (m & cache)) { + mask = m + m = mask << 1 + i += 1 + } + if (i < 0 || i >= iN) { + i = iN + false + } + else { + found = true + true + } + }) + def nextInt() = if (hasNext) { val j = i; found = false; mask = mask << 1; i += 1; j } else throwNSEE +} + +///////////////////////// +// Value class adapter // +///////////////////////// + +final class RichBitSetCanStep(private val underlying: collection.BitSet) extends AnyVal with MakesStepper[Int, EfficientSubstep] { + override def stepper[S <: Stepper[_]](implicit ss: StepperShape[Int, S]) = { + val bits: Array[Long] = underlying match { + case m: collection.mutable.BitSet => CollectionInternals.getBitSetInternals(m) + case n: collection.immutable.BitSet.BitSetN => RichBitSetCanStep.reflectInternalsN(n) + case x => x.toBitMask + } + new StepsIntBitSet(bits, 0, math.min(bits.length*64L, Int.MaxValue).toInt).asInstanceOf[S with EfficientSubstep] + } +} + +private[java8] object RichBitSetCanStep { + private val reflector = 
classOf[collection.immutable.BitSet.BitSetN].getMethod("elems") + def reflectInternalsN(bsn: collection.immutable.BitSet.BitSetN): Array[Long] = reflector.invoke(bsn).asInstanceOf[Array[Long]] +} + diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsFlatHashTable.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsFlatHashTable.scala new file mode 100644 index 0000000..035aef8 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsFlatHashTable.scala @@ -0,0 +1,64 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ +import scala.compat.java8.runtime._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsAnyFlatHashTable[A](_underlying: Array[AnyRef], _i0: Int, _iN: Int) +extends StepsLikeGapped[A, StepsAnyFlatHashTable[A]](_underlying, _i0, _iN) { + def next() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[A]; currentEntry = null; ans } + def semiclone(half: Int) = new StepsAnyFlatHashTable[A](underlying, i0, half) +} + +private[java8] class StepsDoubleFlatHashTable(_underlying: Array[AnyRef], _i0: Int, _iN: Int) +extends StepsDoubleLikeGapped[StepsDoubleFlatHashTable](_underlying, _i0, _iN) { + def nextDouble() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[Double]; currentEntry = null; ans } + def semiclone(half: Int) = new StepsDoubleFlatHashTable(underlying, i0, half) +} + +private[java8] class StepsIntFlatHashTable(_underlying: Array[AnyRef], _i0: Int, _iN: Int) 
+extends StepsIntLikeGapped[StepsIntFlatHashTable](_underlying, _i0, _iN) { + def nextInt() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[Int]; currentEntry = null; ans } + def semiclone(half: Int) = new StepsIntFlatHashTable(underlying, i0, half) +} + +private[java8] class StepsLongFlatHashTable(_underlying: Array[AnyRef], _i0: Int, _iN: Int) +extends StepsLongLikeGapped[StepsLongFlatHashTable](_underlying, _i0, _iN) { + def nextLong() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[Long]; currentEntry = null; ans } + def semiclone(half: Int) = new StepsLongFlatHashTable(underlying, i0, half) +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichFlatHashTableCanStep[T](private val underlying: collection.mutable.FlatHashTable[T]) extends AnyVal with MakesStepper[T, EfficientSubstep] { + override def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = { + val tbl = CollectionInternals.getTable(underlying) + ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntFlatHashTable (tbl, 0, tbl.length) + case StepperShape.LongValue => new StepsLongFlatHashTable (tbl, 0, tbl.length) + case StepperShape.DoubleValue => new StepsDoubleFlatHashTable(tbl, 0, tbl.length) + case _ => ss.parUnbox(new StepsAnyFlatHashTable[T](tbl, 0, tbl.length)) + }).asInstanceOf[S with EfficientSubstep] + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsHashTable.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsHashTable.scala new file mode 100644 index 0000000..e3d053c --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsHashTable.scala @@ -0,0 +1,209 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ +import scala.compat.java8.runtime._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +// Steppers for keys (type of HashEntry doesn't matter) + +private[java8] class StepsAnyHashTableKey[K](_underlying: Array[collection.mutable.HashEntry[K, _]], _i0: Int, _iN: Int) +extends StepsLikeGapped[K, StepsAnyHashTableKey[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def next() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[collection.mutable.HashEntry[K, _]].key; currentEntry = currentEntry.asInstanceOf[collection.mutable.HashEntry[K, _]].next.asInstanceOf[AnyRef]; ans } + def semiclone(half: Int) = new StepsAnyHashTableKey[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, _]]], i0, half) +} + +private[java8] class StepsDoubleHashTableKey(_underlying: Array[collection.mutable.HashEntry[Double, _]], _i0: Int, _iN: Int) +extends StepsDoubleLikeGapped[StepsDoubleHashTableKey](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextDouble() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[collection.mutable.HashEntry[Double, _]].key; currentEntry = currentEntry.asInstanceOf[collection.mutable.HashEntry[Double, _]].next.asInstanceOf[AnyRef]; ans } + def semiclone(half: Int) = new StepsDoubleHashTableKey(underlying.asInstanceOf[Array[collection.mutable.HashEntry[Double, _]]], i0, half) +} + +private[java8] class StepsIntHashTableKey(_underlying: Array[collection.mutable.HashEntry[Int, _]], _i0: Int, _iN: Int) +extends StepsIntLikeGapped[StepsIntHashTableKey](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextInt() = if (currentEntry eq null) throwNSEE else { 
val ans = currentEntry.asInstanceOf[collection.mutable.HashEntry[Int, _]].key; currentEntry = currentEntry.asInstanceOf[collection.mutable.HashEntry[Int, _]].next.asInstanceOf[AnyRef]; ans } + def semiclone(half: Int) = new StepsIntHashTableKey(underlying.asInstanceOf[Array[collection.mutable.HashEntry[Int, _]]], i0, half) +} + +private[java8] class StepsLongHashTableKey(_underlying: Array[collection.mutable.HashEntry[Long, _]], _i0: Int, _iN: Int) +extends StepsLongLikeGapped[StepsLongHashTableKey](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextLong() = if (currentEntry eq null) throwNSEE else { val ans = currentEntry.asInstanceOf[collection.mutable.HashEntry[Long, _]].key; currentEntry = currentEntry.asInstanceOf[collection.mutable.HashEntry[Long, _]].next.asInstanceOf[AnyRef]; ans } + def semiclone(half: Int) = new StepsLongHashTableKey(underlying.asInstanceOf[Array[collection.mutable.HashEntry[Long, _]]], i0, half) +} + +// Steppers for entries stored in DefaultEntry HashEntry +// (both for key-value pair and for values alone) + +private[java8] class StepsAnyDefaultHashTable[K, V](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, V]]], _i0: Int, _iN: Int) +extends StepsLikeGapped[(K, V), StepsAnyDefaultHashTable[K, V]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def next() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.DefaultEntry[K, V]]; currentEntry = de.next; (de.key, de.value) } + def semiclone(half: Int) = + new StepsAnyDefaultHashTable[K, V](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, V]]]], i0, half) +} + +private[java8] class StepsAnyDefaultHashTableValue[K, V](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, V]]], _i0: Int, _iN: Int) +extends StepsLikeGapped[V, StepsAnyDefaultHashTableValue[K, V]](_underlying.asInstanceOf[Array[AnyRef]], _i0, 
_iN) { + def next() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.DefaultEntry[K, V]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsAnyDefaultHashTableValue[K, V](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, V]]]], i0, half) +} + +private[java8] class StepsDoubleDefaultHashTableValue[K](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Double]]], _i0: Int, _iN: Int) +extends StepsDoubleLikeGapped[StepsDoubleDefaultHashTableValue[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextDouble() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.DefaultEntry[K, Double]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsDoubleDefaultHashTableValue[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Double]]]], i0, half) +} + +private[java8] class StepsIntDefaultHashTableValue[K](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Int]]], _i0: Int, _iN: Int) +extends StepsIntLikeGapped[StepsIntDefaultHashTableValue[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextInt() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.DefaultEntry[K, Int]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsIntDefaultHashTableValue[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Int]]]], i0, half) +} + +private[java8] class StepsLongDefaultHashTableValue[K](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Long]]], _i0: Int, _iN: Int) +extends StepsLongLikeGapped[StepsLongDefaultHashTableValue[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextLong() = 
+ if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.DefaultEntry[K, Long]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsLongDefaultHashTableValue[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Long]]]], i0, half) +} + +// Steppers for entries stored in LinkedEntry HashEntry +// (both for key-value pair and for values alone) + +private[java8] class StepsAnyLinkedHashTable[K, V](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, V]]], _i0: Int, _iN: Int) +extends StepsLikeGapped[(K, V), StepsAnyLinkedHashTable[K, V]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def next() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.LinkedEntry[K, V]]; currentEntry = de.next; (de.key, de.value) } + def semiclone(half: Int) = + new StepsAnyLinkedHashTable[K, V](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, V]]]], i0, half) +} + +private[java8] class StepsAnyLinkedHashTableValue[K, V](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, V]]], _i0: Int, _iN: Int) +extends StepsLikeGapped[V, StepsAnyLinkedHashTableValue[K, V]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def next() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.LinkedEntry[K, V]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsAnyLinkedHashTableValue[K, V](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, V]]]], i0, half) +} + +private[java8] class StepsDoubleLinkedHashTableValue[K](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Double]]], _i0: Int, _iN: Int) +extends 
StepsDoubleLikeGapped[StepsDoubleLinkedHashTableValue[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextDouble() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.LinkedEntry[K, Double]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsDoubleLinkedHashTableValue[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Double]]]], i0, half) +} + +private[java8] class StepsIntLinkedHashTableValue[K](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Int]]], _i0: Int, _iN: Int) +extends StepsIntLikeGapped[StepsIntLinkedHashTableValue[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextInt() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.LinkedEntry[K, Int]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsIntLinkedHashTableValue[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Int]]]], i0, half) +} + +private[java8] class StepsLongLinkedHashTableValue[K](_underlying: Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Long]]], _i0: Int, _iN: Int) +extends StepsLongLikeGapped[StepsLongLinkedHashTableValue[K]](_underlying.asInstanceOf[Array[AnyRef]], _i0, _iN) { + def nextLong() = + if (currentEntry eq null) throwNSEE + else { val de = currentEntry.asInstanceOf[collection.mutable.LinkedEntry[K, Long]]; currentEntry = de.next; de.value } + def semiclone(half: Int) = + new StepsLongLinkedHashTableValue[K](underlying.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Long]]]], i0, half) +} + + +////////////////////////// +// Value class adapters // +////////////////////////// + +// Steppers for entries stored in DefaultEntry HashEntry + +final class RichDefaultHashTableCanStep[K, V](private val 
underlying: collection.mutable.HashTable[K, collection.mutable.DefaultEntry[K, V]]) extends AnyVal with MakesKeyValueStepper[K, V, EfficientSubstep] with MakesStepper[(K, V), EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[(K, V), S]) = { + val tbl = CollectionInternals.getTable[K, collection.mutable.DefaultEntry[K, V]](underlying) + new StepsAnyDefaultHashTable(tbl, 0, tbl.length).asInstanceOf[S with EfficientSubstep] + } + + def keyStepper[S <: Stepper[_]](implicit ss: StepperShape[K, S]) = { + val tbl = CollectionInternals.getTable[K, collection.mutable.DefaultEntry[K, V]](underlying) + ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntHashTableKey (tbl.asInstanceOf[Array[collection.mutable.HashEntry[Int, _]]], 0, tbl.length) + case StepperShape.LongValue => new StepsLongHashTableKey (tbl.asInstanceOf[Array[collection.mutable.HashEntry[Long, _]]], 0, tbl.length) + case StepperShape.DoubleValue => new StepsDoubleHashTableKey(tbl.asInstanceOf[Array[collection.mutable.HashEntry[Double, _]]], 0, tbl.length) + case _ => ss.parUnbox(new StepsAnyHashTableKey (tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, _]]], 0, tbl.length)) + }).asInstanceOf[S with EfficientSubstep] + } + + def valueStepper[S <: Stepper[_]](implicit ss: StepperShape[V, S]) = { + val tbl = CollectionInternals.getTable[K, collection.mutable.DefaultEntry[K, V]](underlying) + ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntDefaultHashTableValue (tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Int ]]]], 0, tbl.length) + case StepperShape.LongValue => new StepsLongDefaultHashTableValue (tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Long ]]]], 0, tbl.length) + case StepperShape.DoubleValue => new StepsDoubleDefaultHashTableValue(tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.DefaultEntry[K, Double]]]], 0, 
tbl.length) + case _ => ss.parUnbox(new StepsAnyDefaultHashTableValue (tbl, 0, tbl.length)) + }).asInstanceOf[S with EfficientSubstep] + } +} + +// Steppers for entries stored in LinkedEntry HashEntry + +final class RichLinkedHashTableCanStep[K, V](private val underlying: collection.mutable.HashTable[K, collection.mutable.LinkedEntry[K, V]]) extends AnyVal with MakesKeyValueStepper[K, V, EfficientSubstep] with MakesStepper[(K, V), EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[(K, V), S]) = { + val tbl = CollectionInternals.getTable[K, collection.mutable.LinkedEntry[K, V]](underlying) + new StepsAnyLinkedHashTable(tbl, 0, tbl.length).asInstanceOf[S with EfficientSubstep] + } + + def keyStepper[S <: Stepper[_]](implicit ss: StepperShape[K, S]) = { + val tbl = CollectionInternals.getTable[K, collection.mutable.LinkedEntry[K, V]](underlying) + ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntHashTableKey (tbl.asInstanceOf[Array[collection.mutable.HashEntry[Int, _]]], 0, tbl.length) + case StepperShape.LongValue => new StepsLongHashTableKey (tbl.asInstanceOf[Array[collection.mutable.HashEntry[Long, _]]], 0, tbl.length) + case StepperShape.DoubleValue => new StepsDoubleHashTableKey(tbl.asInstanceOf[Array[collection.mutable.HashEntry[Double, _]]], 0, tbl.length) + case _ => ss.parUnbox(new StepsAnyHashTableKey (tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, _]]], 0, tbl.length)) + }).asInstanceOf[S with EfficientSubstep] + } + + def valueStepper[S <: Stepper[_]](implicit ss: StepperShape[V, S]) = { + val tbl = CollectionInternals.getTable[K, collection.mutable.LinkedEntry[K, V]](underlying) + ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntLinkedHashTableValue (tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Int ]]]], 0, tbl.length) + case StepperShape.LongValue => new StepsLongLinkedHashTableValue 
(tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Long ]]]], 0, tbl.length) + case StepperShape.DoubleValue => new StepsDoubleLinkedHashTableValue(tbl.asInstanceOf[Array[collection.mutable.HashEntry[K, collection.mutable.LinkedEntry[K, Double]]]], 0, tbl.length) + case _ => ss.parUnbox(new StepsAnyLinkedHashTableValue (tbl, 0, tbl.length)) + }).asInstanceOf[S with EfficientSubstep] + } +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsImmHashMap.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsImmHashMap.scala new file mode 100644 index 0000000..352e0a2 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsImmHashMap.scala @@ -0,0 +1,156 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +import Stepper._ + +// Note that there are separate implementations for keys, values, and key-value pairs + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsAnyImmHashMap[K, V](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) +extends StepsLikeImmHashMap[K, V, (K, V), StepsAnyImmHashMap[K, V]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[K,V], j0: Int, jN: Int) = new StepsAnyImmHashMap[K, V](u, j0, jN) + def next(): (K, V) = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.iterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsAnyImmHashMapKey[K, V](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) +extends StepsLikeImmHashMap[K, V, K, StepsAnyImmHashMapKey[K, V]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[K,V], j0: Int, jN: Int) = new StepsAnyImmHashMapKey[K, V](u, j0, jN) + def next(): K = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.keysIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsAnyImmHashMapValue[K, V](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) +extends StepsLikeImmHashMap[K, V, V, StepsAnyImmHashMapValue[K, V]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[K,V], j0: Int, jN: Int) = new StepsAnyImmHashMapValue[K, V](u, j0, jN) + def next(): V = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.valuesIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsDoubleImmHashMapKey[V](_underlying: collection.immutable.HashMap[Double, V], _i0: Int, _iN: Int) +extends StepsDoubleLikeImmHashMap[Double, V, 
StepsDoubleImmHashMapKey[V]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[Double,V], j0: Int, jN: Int) = new StepsDoubleImmHashMapKey[V](u, j0, jN) + def nextDouble() = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.keysIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsDoubleImmHashMapValue[K](_underlying: collection.immutable.HashMap[K, Double], _i0: Int, _iN: Int) +extends StepsDoubleLikeImmHashMap[K, Double, StepsDoubleImmHashMapValue[K]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[K,Double], j0: Int, jN: Int) = new StepsDoubleImmHashMapValue[K](u, j0, jN) + def nextDouble() = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.valuesIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsIntImmHashMapKey[V](_underlying: collection.immutable.HashMap[Int, V], _i0: Int, _iN: Int) +extends StepsIntLikeImmHashMap[Int, V, StepsIntImmHashMapKey[V]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[Int,V], j0: Int, jN: Int) = new StepsIntImmHashMapKey[V](u, j0, jN) + def nextInt() = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.keysIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsIntImmHashMapValue[K](_underlying: collection.immutable.HashMap[K, Int], _i0: Int, _iN: Int) +extends StepsIntLikeImmHashMap[K, Int, StepsIntImmHashMapValue[K]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[K,Int], j0: Int, jN: Int) = new StepsIntImmHashMapValue[K](u, j0, jN) + def nextInt() = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.valuesIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsLongImmHashMapKey[V](_underlying: collection.immutable.HashMap[Long, V], _i0: Int, _iN: Int) +extends 
StepsLongLikeImmHashMap[Long, V, StepsLongImmHashMapKey[V]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[Long,V], j0: Int, jN: Int) = new StepsLongImmHashMapKey[V](u, j0, jN) + def nextLong() = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.keysIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +private[java8] class StepsLongImmHashMapValue[K](_underlying: collection.immutable.HashMap[K, Long], _i0: Int, _iN: Int) +extends StepsLongLikeImmHashMap[K, Long, StepsLongImmHashMapValue[K]](_underlying, _i0, _iN) { + protected def demiclone(u: collection.immutable.HashMap[K,Long], j0: Int, jN: Int) = new StepsLongImmHashMapValue[K](u, j0, jN) + def nextLong() = + if (hasNext) { + if (theIterator eq null) theIterator = underlying.valuesIterator + i += 1 + theIterator.next + } + else throwNSEE +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichImmHashMapCanStep[K, V](private val underlying: collection.immutable.HashMap[K, V]) extends AnyVal with MakesKeyValueStepper[K, V, EfficientSubstep] with MakesStepper[(K, V), EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[(K, V), S]) = + new StepsAnyImmHashMap[K, V](underlying, 0, underlying.size).asInstanceOf[S with EfficientSubstep] + + def keyStepper[S <: Stepper[_]](implicit ss: StepperShape[K, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntImmHashMapKey (underlying.asInstanceOf[collection.immutable.HashMap[Int, V]], 0, underlying.size) + case StepperShape.LongValue => new StepsLongImmHashMapKey (underlying.asInstanceOf[collection.immutable.HashMap[Long, V]], 0, underlying.size) + case StepperShape.DoubleValue => new StepsDoubleImmHashMapKey (underlying.asInstanceOf[collection.immutable.HashMap[Double, V]], 0, underlying.size) + case _ => ss.parUnbox(new StepsAnyImmHashMapKey[K, V](underlying, 0, underlying.size)) + }).asInstanceOf[S with 
EfficientSubstep] + + def valueStepper[S <: Stepper[_]](implicit ss: StepperShape[V, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntImmHashMapValue (underlying.asInstanceOf[collection.immutable.HashMap[K, Int]], 0, underlying.size) + case StepperShape.LongValue => new StepsLongImmHashMapValue (underlying.asInstanceOf[collection.immutable.HashMap[K, Long]], 0, underlying.size) + case StepperShape.DoubleValue => new StepsDoubleImmHashMapValue (underlying.asInstanceOf[collection.immutable.HashMap[K, Double]], 0, underlying.size) + case _ => ss.parUnbox(new StepsAnyImmHashMapValue[K, V](underlying, 0, underlying.size)) + }).asInstanceOf[S with EfficientSubstep] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsImmHashSet.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsImmHashSet.scala new file mode 100644 index 0000000..8d6cf72 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsImmHashSet.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsAnyImmHashSet[A](_underlying: Iterator[A], _N: Int) +extends StepsLikeTrieIterator[A, StepsAnyImmHashSet[A]](_underlying, _N) { + protected def demiclone(it: Iterator[A], N: Int) = new StepsAnyImmHashSet(it, N) + def next(): A = { val ans = underlying.next; i += 1; ans } +} + +private[java8] class StepsDoubleImmHashSet(_underlying: Iterator[Double], _N: Int) +extends StepsDoubleLikeTrieIterator[StepsDoubleImmHashSet](_underlying, _N) { + protected def demiclone(it: Iterator[Double], N: Int) = new StepsDoubleImmHashSet(it, N) + def nextDouble() = { val ans = underlying.next; i += 1; ans } +} + +private[java8] class StepsIntImmHashSet(_underlying: Iterator[Int], _N: Int) +extends StepsIntLikeTrieIterator[StepsIntImmHashSet](_underlying, _N) { + protected def demiclone(it: Iterator[Int], N: Int) = new StepsIntImmHashSet(it, N) + def nextInt() = { val ans = underlying.next; i += 1; ans } +} + +private[java8] class StepsLongImmHashSet(_underlying: Iterator[Long], _N: Int) +extends StepsLongLikeTrieIterator[StepsLongImmHashSet](_underlying, _N) { + protected def demiclone(it: Iterator[Long], N: Int) = new StepsLongImmHashSet(it, N) + def nextLong() = { val ans = underlying.next; i += 1; ans } +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichImmHashSetCanStep[T](private val underlying: collection.immutable.HashSet[T]) extends AnyVal with MakesStepper[T, EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntImmHashSet (underlying.iterator.asInstanceOf[Iterator[Int]], underlying.size) + case StepperShape.LongValue => new StepsLongImmHashSet 
(underlying.iterator.asInstanceOf[Iterator[Long]], underlying.size) + case StepperShape.DoubleValue => new StepsDoubleImmHashSet(underlying.iterator.asInstanceOf[Iterator[Double]], underlying.size) + case _ => ss.parUnbox(new StepsAnyImmHashSet[T](underlying.iterator, underlying.size)) + }).asInstanceOf[S with EfficientSubstep] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIndexedSeq.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIndexedSeq.scala new file mode 100644 index 0000000..f7120f6 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIndexedSeq.scala @@ -0,0 +1,60 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsAnyIndexedSeq[A](underlying: collection.IndexedSeqLike[A, _], _i0: Int, _iN: Int) +extends StepsLikeIndexed[A, StepsAnyIndexedSeq[A]](_i0, _iN) { + def next() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsAnyIndexedSeq[A](underlying, i0, half) +} + +private[java8] class StepsDoubleIndexedSeq[CC <: collection.IndexedSeqLike[Double, _]](underlying: CC, _i0: Int, _iN: Int) +extends StepsDoubleLikeIndexed[StepsDoubleIndexedSeq[CC]](_i0, _iN) { + def nextDouble() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsDoubleIndexedSeq[CC](underlying, i0, half) +} + +private[java8] class StepsIntIndexedSeq[CC <: collection.IndexedSeqLike[Int, 
_]](underlying: CC, _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsIntIndexedSeq[CC]](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsIntIndexedSeq[CC](underlying, i0, half) +} + +private[java8] class StepsLongIndexedSeq[CC <: collection.IndexedSeqLike[Long, _]](underlying: CC, _i0: Int, _iN: Int) +extends StepsLongLikeIndexed[StepsLongIndexedSeq[CC]](_i0, _iN) { + def nextLong() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsLongIndexedSeq[CC](underlying, i0, half) +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichIndexedSeqCanStep[T](private val underlying: collection.IndexedSeqLike[T, _]) extends AnyVal with MakesStepper[T, EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntIndexedSeq (underlying.asInstanceOf[collection.IndexedSeqLike[Int, _]], 0, underlying.length) + case StepperShape.LongValue => new StepsLongIndexedSeq (underlying.asInstanceOf[collection.IndexedSeqLike[Long, _]], 0, underlying.length) + case StepperShape.DoubleValue => new StepsDoubleIndexedSeq(underlying.asInstanceOf[collection.IndexedSeqLike[Double, _]], 0, underlying.length) + case _ => ss.parUnbox(new StepsAnyIndexedSeq[T](underlying, 0, underlying.length)) + }).asInstanceOf[S with EfficientSubstep] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIterable.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIterable.scala new file mode 100644 index 0000000..3f1def6 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIterable.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +// Iterables just defer to iterator unless they can pattern match something better. +// TODO: implement pattern matching! + +final class RichIterableCanStep[T](private val underlying: Iterable[T]) extends AnyVal with MakesStepper[T, Any] { + override def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntIterator (underlying.iterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongValue => new StepsLongIterator (underlying.iterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleValue => new StepsDoubleIterator(underlying.iterator.asInstanceOf[Iterator[Double]]) + case _ => ss.seqUnbox(new StepsAnyIterator[T](underlying.iterator)) + }).asInstanceOf[S] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIterator.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIterator.scala new file mode 100644 index 0000000..64c2559 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsIterator.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsAnyIterator[A](_underlying: Iterator[A]) +extends StepsLikeIterator[A, StepsAnyIterator[A]](_underlying) { + def semiclone() = new StepsAnyIterator(null) + def next() = if (proxied ne null) proxied.nextStep else underlying.next +} + +private[java8] class StepsDoubleIterator(_underlying: Iterator[Double]) +extends StepsDoubleLikeIterator[StepsDoubleIterator](_underlying) { + def semiclone() = new StepsDoubleIterator(null) + def nextDouble() = if (proxied ne null) proxied.nextStep else underlying.next +} + +private[java8] class StepsIntIterator(_underlying: Iterator[Int]) +extends StepsIntLikeIterator[StepsIntIterator](_underlying) { + def semiclone() = new StepsIntIterator(null) + def nextInt() = if (proxied ne null) proxied.nextStep else underlying.next +} + +private[java8] class StepsLongIterator(_underlying: Iterator[Long]) +extends StepsLongLikeIterator[StepsLongIterator](_underlying) { + def semiclone() = new StepsLongIterator(null) + def nextLong() = if (proxied ne null) proxied.nextStep else underlying.next +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichIteratorCanStep[T](private val underlying: Iterator[T]) extends AnyVal with MakesStepper[T, Any] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntIterator (underlying.asInstanceOf[Iterator[Int]]) + case StepperShape.LongValue => new StepsLongIterator (underlying.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleValue => new StepsDoubleIterator(underlying.asInstanceOf[Iterator[Double]]) + case _ => ss.seqUnbox(new StepsAnyIterator[T](underlying)) + }).asInstanceOf[S] +} diff --git 
a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeGapped.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeGapped.scala new file mode 100644 index 0000000..4d6f7cc --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeGapped.scala @@ -0,0 +1,79 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Abstracts all the generic operations of stepping over a backing array + * for some collection where the elements are stored generically and some + * may be missing. Subclasses should set `currentEntry` to `null` when it + * is used as a signal to look for more entries in the array. (This also + * allows a subclass to traverse a sublist by updating `currentEntry`.) + */ +private[java8] abstract class AbstractStepsLikeGapped[Sub >: Null, Semi <: Sub](protected val underlying: Array[AnyRef], protected var i0: Int, protected var iN: Int) + extends EfficientSubstep { + + protected var currentEntry: AnyRef = null + def semiclone(half: Int): Semi + def characteristics(): Int = Ordered + def estimateSize(): Long = if (!hasNext) 0 else iN - i0 + def hasNext(): Boolean = currentEntry != null || (i0 < iN && { + do { currentEntry = underlying(i0); i0 += 1 } while (currentEntry == null && i0 < iN) + currentEntry != null + }) + def substep(): Sub = { + if (iN-1 > i0) { + val half = (i0+iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} + +/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs` + * and interpreting the contents as the elements of a collection. 
+ */ +private[java8] abstract class StepsLikeGapped[A, STA >: Null <: StepsLikeGapped[A, _]](_underlying: Array[AnyRef], _i0: Int, _iN: Int) + extends AbstractStepsLikeGapped[AnyStepper[A], STA](_underlying, _i0, _iN) + with AnyStepper[A] +{} + +/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs` + * and interpreting the contents as the elements of a collection of `Double`s. Subclasses + * are responsible for unboxing the `AnyRef` inside `nextDouble`. + */ +private[java8] abstract class StepsDoubleLikeGapped[STD >: Null <: StepsDoubleLikeGapped[_]](_underlying: Array[AnyRef], _i0: Int, _iN: Int) + extends AbstractStepsLikeGapped[DoubleStepper, STD](_underlying, _i0, _iN) + with DoubleStepper +{} + +/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs` + * and interpreting the contents as the elements of a collection of `Int`s. Subclasses + * are responsible for unboxing the `AnyRef` inside `nextInt`. + */ +private[java8] abstract class StepsIntLikeGapped[STI >: Null <: StepsIntLikeGapped[_]](_underlying: Array[AnyRef], _i0: Int, _iN: Int) + extends AbstractStepsLikeGapped[IntStepper, STI](_underlying, _i0, _iN) + with IntStepper +{} + +/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs` + * and interpreting the contents as the elements of a collection of `Long`s. Subclasses + * are responsible for unboxing the `AnyRef` inside `nextLong`. 
+ */ +private[java8] abstract class StepsLongLikeGapped[STL >: Null <: StepsLongLikeGapped[_]](_underlying: Array[AnyRef], _i0: Int, _iN: Int) + extends AbstractStepsLikeGapped[LongStepper, STL](_underlying, _i0, _iN) + with LongStepper +{} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeImmHashMap.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeImmHashMap.scala new file mode 100644 index 0000000..e206228 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeImmHashMap.scala @@ -0,0 +1,65 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Abstracts all the generic operations of stepping over an immutable HashMap by slicing it into pieces. + * `next` must update `i` but not `i0` so that later splitting steps can keep track of whether the + * collection needs some sort of modification before transmission to the subclass. 
+ */ +private[java8] trait AbstractStepsLikeImmHashMap[K, V, A, Sub >: Null, Semi >: Null <: Sub with AbstractStepsLikeImmHashMap[K, V, A, Sub, _]] +extends AbstractStepsLikeSliced[collection.immutable.HashMap[K, V], Sub, Semi] { + protected var theIterator: Iterator[A] = null + protected def demiclone(u: collection.immutable.HashMap[K,V], j0: Int, jN: Int): Semi + override def characteristics() = Immutable + def hasNext(): Boolean = i < iN + def semiclone(halfHint: Int): Semi = + if (i >= iN) null + else underlying match { + case trie: collection.immutable.HashMap.HashTrieMap[K, V] => + val parts = if (i > i0) trie.drop(i-i0).split else trie.split + if (parts.length != 2) null + else { + val ans = demiclone(parts.head, 0, parts.head.size) + i = iN - parts.last.size + underlying = parts.last + ans.theIterator = theIterator + theIterator = null + i0 = i + ans + } + case _ => null + } +} + +private[java8] abstract class StepsLikeImmHashMap[K, V, A, SIHM >: Null <: StepsLikeImmHashMap[K, V, A, _]](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) + extends StepsLikeSliced[A, collection.immutable.HashMap[K, V], SIHM](_underlying, _i0, _iN) + with AbstractStepsLikeImmHashMap[K, V, A, AnyStepper[A], SIHM] +{} + +private[java8] abstract class StepsDoubleLikeImmHashMap[K, V, SIHM >: Null <: StepsDoubleLikeImmHashMap[K, V, SIHM]](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) + extends StepsDoubleLikeSliced[collection.immutable.HashMap[K, V], SIHM](_underlying, _i0, _iN) + with AbstractStepsLikeImmHashMap[K, V, Double, DoubleStepper, SIHM] +{} + +private[java8] abstract class StepsIntLikeImmHashMap[K, V, SIHM >: Null <: StepsIntLikeImmHashMap[K, V, SIHM]](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) + extends StepsIntLikeSliced[collection.immutable.HashMap[K, V], SIHM](_underlying, _i0, _iN) + with AbstractStepsLikeImmHashMap[K, V, Int, IntStepper, SIHM] +{} + +private[java8] abstract class 
StepsLongLikeImmHashMap[K, V, SIHM >: Null <: StepsLongLikeImmHashMap[K, V, SIHM]](_underlying: collection.immutable.HashMap[K, V], _i0: Int, _iN: Int) + extends StepsLongLikeSliced[collection.immutable.HashMap[K, V], SIHM](_underlying, _i0, _iN) + with AbstractStepsLikeImmHashMap[K, V, Long, LongStepper, SIHM] +{} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeIndexed.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeIndexed.scala new file mode 100644 index 0000000..ce4edbe --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeIndexed.scala @@ -0,0 +1,62 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Abstracts all the generic operations of stepping over an indexable collection */ +private[java8] abstract class AbstractStepsLikeIndexed[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) + extends EfficientSubstep { + + def semiclone(half: Int): Semi + def characteristics(): Int = Ordered + Sized + SubSized + def estimateSize(): Long = iN - i0 + def hasNext(): Boolean = i0 < iN + def substep(): Sub = { + if (iN-1 > i0) { + val half = (i0+iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} + +/** Abstracts the operation of stepping over a generic indexable collection */ +private[java8] abstract class StepsLikeIndexed[A, STA >: Null <: StepsLikeIndexed[A, _]](_i0: Int, _iN: Int) + extends AbstractStepsLikeIndexed[AnyStepper[A], STA](_i0, _iN) + with AnyStepper[A] +{} + +/** Abstracts the operation of stepping over an indexable collection of Doubles */ +private[java8] 
abstract class StepsDoubleLikeIndexed[STD >: Null <: StepsDoubleLikeIndexed[_]](_i0: Int, _iN: Int) + extends AbstractStepsLikeIndexed[DoubleStepper, STD](_i0, _iN) + with DoubleStepper + with java.util.Spliterator.OfDouble // Compiler wants this for mixin forwarder +{} + +/** Abstracts the operation of stepping over an indexable collection of Ints */ +private[java8] abstract class StepsIntLikeIndexed[STI >: Null <: StepsIntLikeIndexed[_]](_i0: Int, _iN: Int) + extends AbstractStepsLikeIndexed[IntStepper, STI](_i0, _iN) + with IntStepper + with java.util.Spliterator.OfInt // Compiler wants this for mixin forwarder +{} + +/** Abstracts the operation of stepping over an indexable collection of Longs */ +private[java8] abstract class StepsLongLikeIndexed[STL >: Null <: StepsLongLikeIndexed[_]](_i0: Int, _iN: Int) + extends AbstractStepsLikeIndexed[LongStepper, STL](_i0, _iN) + with LongStepper + with java.util.Spliterator.OfLong // Compiler wants this for mixin forwarder +{} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeIterator.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeIterator.scala new file mode 100644 index 0000000..ac78af3 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeIterator.scala @@ -0,0 +1,118 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Common functionality for Steppers that step through an Iterator, caching the results as needed when a split is requested. 
*/ +private[java8] abstract class AbstractStepsLikeIterator[A, SP >: Null <: Stepper[A], Semi <: SP](final protected var underlying: Iterator[A]) { + final protected var nextChunkSize = 16 + final protected var proxied: SP = null + def semiclone(): Semi // Must initialize with null iterator! + def characteristics(): Int = if (proxied ne null) Ordered | Sized | SubSized else Ordered + def estimateSize(): Long = if (proxied ne null) proxied.knownSize else Long.MaxValue + def hasNext(): Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext +} + +/** Abstracts the operation of stepping over an iterator (that needs to be cached when splitting) */ +private[java8] abstract class StepsLikeIterator[A, SLI >: Null <: StepsLikeIterator[A, SLI] with AnyStepper[A]](_underlying: Iterator[A]) + extends AbstractStepsLikeIterator[A, AnyStepper[A], SLI](_underlying) + with AnyStepper[A] +{ + override def substep(): AnyStepper[A] = if (proxied ne null) proxied.substep else { + val acc = new Accumulator[A] + var i = 0 + val n = (nextChunkSize & 0xFFFFFFFC) + while (i < n && underlying.hasNext) { acc += underlying.next; i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.substep + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Abstracts the operation of stepping over an iterator of Doubles (needs caching when split) */ +private[java8] abstract class StepsDoubleLikeIterator[SLI >: Null <: StepsDoubleLikeIterator[SLI] with DoubleStepper](_underlying: Iterator[Double]) + extends AbstractStepsLikeIterator[Double, DoubleStepper, SLI](_underlying) + with DoubleStepper +{ + override def substep(): DoubleStepper = if (proxied ne null) proxied.substep else { + val acc = new DoubleAccumulator + var i = 0 + val n = (nextChunkSize & 0xFFFFFFFC) + while (i < n && underlying.hasNext) { acc += underlying.next; i 
+= 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.substep + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Abstracts the operation of stepping over an iterator of Ints (needs caching when split) */ +private[java8] abstract class StepsIntLikeIterator[SLI >: Null <: StepsIntLikeIterator[SLI] with IntStepper](_underlying: Iterator[Int]) + extends AbstractStepsLikeIterator[Int, IntStepper, SLI](_underlying) + with IntStepper +{ + override def substep(): IntStepper = if (proxied ne null) proxied.substep else { + val acc = new IntAccumulator + var i = 0 + val n = (nextChunkSize & 0xFFFFFFFC) + while (i < n && underlying.hasNext) { acc += underlying.next; i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.substep + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Abstracts the operation of stepping over an iterator of Longs (needs caching when split) */ +private[java8] abstract class StepsLongLikeIterator[SLI >: Null <: StepsLongLikeIterator[SLI] with LongStepper](_underlying: Iterator[Long]) + extends AbstractStepsLikeIterator[Long, LongStepper, SLI](_underlying) + with LongStepper +{ + override def substep: LongStepper = if (proxied ne null) proxied.substep else { + val acc = new LongAccumulator + var i = 0 + val n = (nextChunkSize & 0xFFFFFFFC) + while (i < n && underlying.hasNext) { acc += underlying.next; i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.substep + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} diff --git 
a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeSliced.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeSliced.scala new file mode 100644 index 0000000..5434b0f --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeSliced.scala @@ -0,0 +1,54 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Abstracts all the generic operations of stepping over a collection that can be sliced into pieces. + * `next` must update `i` but not `i0` so that later splitting steps can keep track of whether the + * collection needs some sort of modification before transmission to the subclass. + */ +private[java8] abstract class AbstractStepsLikeSliced[Coll, Sub >: Null, Semi <: Sub](protected var underlying: Coll, protected var i: Int, protected var iN: Int) + extends EfficientSubstep { + + protected var i0: Int = i + def semiclone(halfHint: Int): Semi // Must really do all the work for both this and cloned collection! 
+ def characteristics(): Int = Ordered + def estimateSize(): Long = iN - i + def substep(): Sub = if (estimateSize > 0) semiclone((iN + i) >>> 1) else null +} + +/** Abstracts the operation of stepping over a generic collection that can be efficiently sliced or otherwise subdivided */ +private[java8] abstract class StepsLikeSliced[A, AA, STA >: Null <: StepsLikeSliced[A, AA, _]](_underlying: AA, _i0: Int, _iN: Int) + extends AbstractStepsLikeSliced[AA, AnyStepper[A], STA](_underlying, _i0, _iN) + with AnyStepper[A] +{} + +/** Abstracts the operation of stepping over a collection of Doubles that can be efficiently sliced or otherwise subdivided */ +private[java8] abstract class StepsDoubleLikeSliced[AA, STA >: Null <: StepsDoubleLikeSliced[AA, STA]](_underlying: AA, _i0: Int, _iN: Int) + extends AbstractStepsLikeSliced[AA, DoubleStepper, STA](_underlying, _i0, _iN) + with DoubleStepper +{} + +/** Abstracts the operation of stepping over a collection of Ints that can be efficiently sliced or otherwise subdivided */ +private[java8] abstract class StepsIntLikeSliced[AA, STA >: Null <: StepsIntLikeSliced[AA, STA]](_underlying: AA, _i0: Int, _iN: Int) + extends AbstractStepsLikeSliced[AA, IntStepper, STA](_underlying, _i0, _iN) + with IntStepper +{} + +/** Abstracts the operation of stepping over a collection of Longs that can be efficiently sliced or otherwise subdivided */ +private[java8] abstract class StepsLongLikeSliced[AA, STA >: Null <: StepsLongLikeSliced[AA, STA]](_underlying: AA, _i0: Int, _iN: Int) + extends AbstractStepsLikeSliced[AA, LongStepper, STA](_underlying, _i0, _iN) + with LongStepper +{} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeTrieIterator.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeTrieIterator.scala new file mode 100644 index 0000000..ab2ccaa --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLikeTrieIterator.scala @@ -0,0 +1,59 @@ +/* + * Scala 
(https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Abstracts all the generic operations of stepping over a TrieIterator by asking itself to + * slice itself into pieces. Note that `i` must be kept up to date in subclasses. + */ +private[java8] trait AbstractStepsLikeTrieIterator[A, Sub >: Null, Semi >: Null <: Sub with AbstractStepsLikeTrieIterator[A, Sub, _]] +extends AbstractStepsLikeSliced[Iterator[A], Sub, Semi] { + protected def demiclone(it: Iterator[A], N: Int): Semi + override def characteristics() = Immutable + def hasNext(): Boolean = underlying.hasNext + def semiclone(halfHint: Int): Semi = + if (!underlying.hasNext || i > iN-2) null + else scala.compat.java8.runtime.CollectionInternals.trieIteratorSplit(underlying) match { + case null => null + case ((pre: Iterator[A], pno), post: Iterator[A]) => + val pn = (pno: Any) match { case i: Int => i; case _ => throw new Exception("Unexpected type") } + val ans = demiclone(pre, pn) + i += pn + underlying = post + i0 = i + ans + case _ => null + } +} + +private[java8] abstract class StepsLikeTrieIterator[A, STI >: Null <: StepsLikeTrieIterator[A, _]](_underlying: Iterator[A], _N: Int) + extends StepsLikeSliced[A, Iterator[A], STI](_underlying, 0, _N) + with AbstractStepsLikeTrieIterator[A, AnyStepper[A], STI] +{} + +private[java8] abstract class StepsDoubleLikeTrieIterator[STI >: Null <: StepsDoubleLikeTrieIterator[STI]](_underlying: Iterator[Double], _N: Int) + extends StepsDoubleLikeSliced[Iterator[Double], STI](_underlying, 0, _N) + with AbstractStepsLikeTrieIterator[Double, DoubleStepper, STI] +{} + +private[java8] abstract class 
StepsIntLikeTrieIterator[STI >: Null <: StepsIntLikeTrieIterator[STI]](_underlying: Iterator[Int], _N: Int) + extends StepsIntLikeSliced[Iterator[Int], STI](_underlying, 0, _N) + with AbstractStepsLikeTrieIterator[Int, IntStepper, STI] +{} + +private[java8] abstract class StepsLongLikeTrieIterator[STI >: Null <: StepsLongLikeTrieIterator[STI]](_underlying: Iterator[Long], _N: Int) + extends StepsLongLikeSliced[Iterator[Long], STI](_underlying, 0, _N) + with AbstractStepsLikeTrieIterator[Long, LongStepper, STI] +{} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLinearSeq.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLinearSeq.scala new file mode 100644 index 0000000..e480dab --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsLinearSeq.scala @@ -0,0 +1,68 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsAnyLinearSeq[A](_underlying: collection.LinearSeq[A], _maxN: Long) +extends StepsWithTail[A, collection.LinearSeq[A], StepsAnyLinearSeq[A]](_underlying, _maxN) { + protected def myIsEmpty(cc: collection.LinearSeq[A]): Boolean = cc.isEmpty + protected def myTailOf(cc: collection.LinearSeq[A]) = cc.tail + def next() = if (hasNext()) { maxN -= 1; val ans = underlying.head; underlying = underlying.tail; ans } else throwNSEE + def semiclone(half: Int) = new StepsAnyLinearSeq[A](underlying, half) +} + +private[java8] class StepsDoubleLinearSeq(_underlying: collection.LinearSeq[Double], _maxN: Long) +extends StepsDoubleWithTail[collection.LinearSeq[Double], StepsDoubleLinearSeq](_underlying, _maxN) { + protected def myIsEmpty(cc: collection.LinearSeq[Double]): Boolean = cc.isEmpty + protected def myTailOf(cc: collection.LinearSeq[Double]) = cc.tail + def nextDouble() = if (hasNext()) { maxN -= 1; val ans = underlying.head; underlying = underlying.tail; ans } else throwNSEE + def semiclone(half: Int) = new StepsDoubleLinearSeq(underlying, half) +} + +private[java8] class StepsIntLinearSeq(_underlying: collection.LinearSeq[Int], _maxN: Long) +extends StepsIntWithTail[collection.LinearSeq[Int], StepsIntLinearSeq](_underlying, _maxN) { + protected def myIsEmpty(cc: collection.LinearSeq[Int]): Boolean = cc.isEmpty + protected def myTailOf(cc: collection.LinearSeq[Int]) = cc.tail + def nextInt() = if (hasNext()) { maxN -= 1; val ans = underlying.head; underlying = underlying.tail; ans } else throwNSEE + def semiclone(half: Int) = new StepsIntLinearSeq(underlying, half) +} + +private[java8] class StepsLongLinearSeq(_underlying: collection.LinearSeq[Long], _maxN: Long) +extends 
StepsLongWithTail[collection.LinearSeq[Long], StepsLongLinearSeq](_underlying, _maxN) { + protected def myIsEmpty(cc: collection.LinearSeq[Long]): Boolean = cc.isEmpty + protected def myTailOf(cc: collection.LinearSeq[Long]) = cc.tail + def nextLong() = if (hasNext()) { maxN -= 1; val ans = underlying.head; underlying = underlying.tail; ans } else throwNSEE + def semiclone(half: Int) = new StepsLongLinearSeq(underlying, half) +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichLinearSeqCanStep[T](private val underlying: collection.LinearSeq[T]) extends AnyVal with MakesStepper[T, Any] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntLinearSeq (underlying.asInstanceOf[collection.LinearSeq[Int]], Long.MaxValue) + case StepperShape.LongValue => new StepsLongLinearSeq (underlying.asInstanceOf[collection.LinearSeq[Long]], Long.MaxValue) + case StepperShape.DoubleValue => new StepsDoubleLinearSeq(underlying.asInstanceOf[collection.LinearSeq[Double]], Long.MaxValue) + case _ => ss.seqUnbox(new StepsAnyLinearSeq[T](underlying, Long.MaxValue)) + }).asInstanceOf[S] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsMap.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsMap.scala new file mode 100644 index 0000000..cc5013e --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsMap.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +// Generic maps defer to the iterator steppers if a more precise type cannot be found via pattern matching +// TODO: implement pattern matching + +final class RichMapCanStep[K, V](private val underlying: collection.Map[K, V]) extends AnyVal with MakesKeyValueStepper[K, V, Any] { + // No generic stepper because RichIterableCanStep will get that anyway, and we don't pattern match here + + def keyStepper[S <: Stepper[_]](implicit ss: StepperShape[K, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntIterator (underlying.keysIterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongValue => new StepsLongIterator (underlying.keysIterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleValue => new StepsDoubleIterator(underlying.keysIterator.asInstanceOf[Iterator[Double]]) + case _ => ss.seqUnbox(new StepsAnyIterator (underlying.keysIterator)) + }).asInstanceOf[S] + + def valueStepper[S <: Stepper[_]](implicit ss: StepperShape[V, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntIterator (underlying.valuesIterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongValue => new StepsLongIterator (underlying.valuesIterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleValue => new StepsDoubleIterator(underlying.valuesIterator.asInstanceOf[Iterator[Double]]) + case _ => ss.seqUnbox(new StepsAnyIterator (underlying.valuesIterator)) + }).asInstanceOf[S] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsRange.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsRange.scala new file mode 100644 index 0000000..48060a7 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsRange.scala @@ -0,0 +1,64 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.compat.java8.collectionImpl._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] class StepsIntRange(underlying: Range, _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsIntRange](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsIntRange(underlying, i0, half) +} + +private[java8] class StepsAnyNumericRange[T](underlying: collection.immutable.NumericRange[T], _i0: Int, _iN: Int) +extends StepsLikeIndexed[T, StepsAnyNumericRange[T]](_i0, _iN) { + def next() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsAnyNumericRange[T](underlying, i0, half) +} + +private[java8] class StepsIntNumericRange(underlying: collection.immutable.NumericRange[Int], _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsIntNumericRange](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsIntNumericRange(underlying, i0, half) +} + +private[java8] class StepsLongNumericRange(underlying: collection.immutable.NumericRange[Long], _i0: Int, _iN: Int) +extends StepsLongLikeIndexed[StepsLongNumericRange](_i0, _iN) { + def nextLong() = if (hasNext()) { val j = i0; i0 += 1; underlying(j) } else throwNSEE + def semiclone(half: Int) = new StepsLongNumericRange(underlying, i0, half) +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichRangeCanStep[T](private val underlying: Range) extends AnyVal with MakesStepper[Int, 
EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[Int, S]) = + new StepsIntRange(underlying, 0, underlying.length).asInstanceOf[S with EfficientSubstep] +} + +final class RichNumericRangeCanStep[T](private val underlying: collection.immutable.NumericRange[T]) extends AnyVal with MakesStepper[T, EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = ((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntNumericRange (underlying.asInstanceOf[collection.immutable.NumericRange[Int]], 0, underlying.length) + case StepperShape.LongValue => new StepsLongNumericRange (underlying.asInstanceOf[collection.immutable.NumericRange[Long]], 0, underlying.length) + case _ => ss.parUnbox(new StepsAnyNumericRange[T](underlying, 0, underlying.length)) + }).asInstanceOf[S with EfficientSubstep] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsString.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsString.scala new file mode 100644 index 0000000..b2993b2 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsString.scala @@ -0,0 +1,61 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ + +import Stepper._ + +//////////////////////////// +// Stepper implementation // +//////////////////////////// + +private[java8] class StepperStringChar(underlying: CharSequence, _i0: Int, _iN: Int) + extends StepsIntLikeIndexed[StepperStringChar](_i0, _iN) { + def nextInt() = if (hasNext()) { val j = i0; i0 += 1; underlying.charAt(j) } else throwNSEE + def semiclone(half: Int) = new StepperStringChar(underlying, i0, half) +} + +private[java8] class StepperStringCodePoint(underlying: String, var i0: Int, var iN: Int) extends IntStepper with EfficientSubstep { + def characteristics() = NonNull + def estimateSize = iN - i0 + def hasNext = i0 < iN + def nextInt() = { + if (hasNext()) { + val cp = underlying.codePointAt(i0) + i0 += java.lang.Character.charCount(cp) + cp + } + else throwNSEE + } + def substep() = { + if (iN-3 > i0) { + var half = (i0+iN) >>> 1 + if (java.lang.Character.isLowSurrogate(underlying.charAt(half))) half -= 1 + val ans = new StepperStringCodePoint(underlying, i0, half) + i0 = half + ans + } + else null + } +} + +///////////////////////// +// Value class adapter // +///////////////////////// + +final class RichStringCanStep(private val underlying: String) extends AnyVal with MakesStepper[Char, EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[Char, S]) = charStepper.asInstanceOf[S with EfficientSubstep] + @inline def charStepper: IntStepper with EfficientSubstep = new StepperStringChar(underlying, 0, underlying.length) + @inline def codepointStepper: IntStepper with EfficientSubstep = new StepperStringCodePoint(underlying, 0, underlying.length) +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsVector.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsVector.scala new file mode 100644 index 0000000..e9abb51 --- /dev/null +++ 
b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsVector.scala @@ -0,0 +1,202 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.annotation.switch + +import scala.collection.immutable.VectorIterator + +import scala.compat.java8.collectionImpl._ +import scala.compat.java8.runtime._ + +import Stepper._ + +///////////////////////////// +// Stepper implementations // +///////////////////////////// + +private[java8] trait StepsVectorLike[A] { + protected def myVector: Vector[A] + protected def myVectorIterator: VectorIterator[A] + protected def myVectorLength: Int + protected var index: Int = 32 + protected var data: Array[AnyRef] = null + protected var index1: Int = 32 + protected var data1: Array[AnyRef] = null + protected final def advanceData(iX: Int): Unit = { + index1 += 1 + if (index >= 32) { + if (myVector != null) initTo(iX) + else initVpTo(iX) + } + else { + data = data1(index1).asInstanceOf[Array[AnyRef]] + index = 0 + } + } + protected final def initTo(iX: Int): Unit = { + // WARNING--initVpTo is an exact copy of this except for the type! If you change one you must change the other! + // (Manually specialized this way for speed.) 
+ myVectorLength match { + case x if x <= 0x20 => + index = iX + data = CollectionInternals.getDisplay0(myVector) + case x if x <= 0x400 => + index1 = iX >>> 5 + data1 = CollectionInternals.getDisplay1(myVector) + index = iX & 0x1F + data = data1(index1).asInstanceOf[Array[AnyRef]] + case x => + var N = 0 + var dataN: Array[AnyRef] = + if (x <= 0x8000) { N = 2; CollectionInternals.getDisplay2(myVector) } + else if (x <= 0x100000) { N = 3; CollectionInternals.getDisplay3(myVector) } + else if (x <= 0x2000000) { N = 4; CollectionInternals.getDisplay4(myVector) } + else /*x <= 0x40000000*/{ N = 5; CollectionInternals.getDisplay5(myVector) } + while (N > 2) { + dataN = dataN((iX >>> (5*N))&0x1F).asInstanceOf[Array[AnyRef]] + N -= 1 + } + index1 = (iX >>> 5) & 0x1F + data1 = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + data = data1(index1).asInstanceOf[Array[AnyRef]] + } + } + protected final def initVpTo(iX: Int): Unit = { + // WARNING--this is an exact copy of initTo! If you change one you must change the other! + // (Manually specialized this way for speed.) 
+ myVectorLength match { + case x if x <= 0x20 => + index = iX + data = CollectionInternals.getDisplay0(myVectorIterator) + case x if x <= 0x400 => + index1 = iX >>> 5 + data1 = CollectionInternals.getDisplay1(myVectorIterator) + index = iX & 0x1F + data = data1(index1).asInstanceOf[Array[AnyRef]] + case x => + var N = 0 + var dataN: Array[AnyRef] = + if (x <= 0x8000) { N = 2; CollectionInternals.getDisplay2(myVectorIterator) } + else if (x <= 0x100000) { N = 3; CollectionInternals.getDisplay3(myVectorIterator) } + else if (x <= 0x2000000) { N = 4; CollectionInternals.getDisplay4(myVectorIterator) } + else /*x <= 0x40000000*/{ N = 5; CollectionInternals.getDisplay5(myVectorIterator) } + while (N > 2) { + dataN = dataN((iX >>> (5*N))&0x1F).asInstanceOf[Array[AnyRef]] + N -= 1 + } + index1 = (iX >>> 5) & 0x1F + data1 = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + data = data1(index1).asInstanceOf[Array[AnyRef]] + } + } +} + +private[java8] class StepsAnyVector[A](underlying: Vector[A], _i0: Int, _iN: Int) +extends StepsLikeIndexed[A, StepsAnyVector[A]](_i0, _iN) +with StepsVectorLike[A] { + protected val myVector = if (CollectionInternals.getDirt(underlying)) null else underlying + protected val myVectorIterator = if (myVector == null) underlying.iterator else null + protected val myVectorLength = underlying.length + def next() = if (hasNext()) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + data(index).asInstanceOf[A] + } else throwNSEE + def semiclone(half: Int) = { + val ans = new StepsAnyVector(underlying, i0, half) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[java8] class StepsDoubleVector(underlying: Vector[Double], _i0: Int, _iN: Int) +extends StepsDoubleLikeIndexed[StepsDoubleVector](_i0, _iN) +with StepsVectorLike[Double] { + protected val myVector = if (CollectionInternals.getDirt(underlying)) null else underlying + protected val myVectorIterator = if (myVector == null) underlying.iterator else 
null + protected val myVectorLength = underlying.length + def nextDouble() = if (hasNext()) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + data(index).asInstanceOf[Double] + } else throwNSEE + def semiclone(half: Int) = { + val ans = new StepsDoubleVector(underlying, i0, half) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[java8] class StepsIntVector(underlying: Vector[Int], _i0: Int, _iN: Int) +extends StepsIntLikeIndexed[StepsIntVector](_i0, _iN) +with StepsVectorLike[Int] { + protected val myVector = if (CollectionInternals.getDirt(underlying)) null else underlying + protected val myVectorIterator = if (myVector == null) underlying.iterator else null + protected val myVectorLength = underlying.length + def nextInt() = if (hasNext()) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + data(index).asInstanceOf[Int] + } else throwNSEE + def semiclone(half: Int) = { + val ans = new StepsIntVector(underlying, i0, half) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[java8] class StepsLongVector(underlying: Vector[Long], _i0: Int, _iN: Int) +extends StepsLongLikeIndexed[StepsLongVector](_i0, _iN) +with StepsVectorLike[Long] { + protected val myVector = if (CollectionInternals.getDirt(underlying)) null else underlying + protected val myVectorIterator = if (myVector == null) underlying.iterator else null + protected val myVectorLength = underlying.length + def nextLong() = if (hasNext()) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + data(index).asInstanceOf[Long] + } else throwNSEE + def semiclone(half: Int) = { + val ans = new StepsLongVector(underlying, i0, half) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +////////////////////////// +// Value class adapters // +////////////////////////// + +final class RichVectorCanStep[T](private val underlying: Vector[T]) extends AnyVal with MakesStepper[T, EfficientSubstep] { + def stepper[S <: Stepper[_]](implicit ss: StepperShape[T, S]) = 
((ss.shape: @switch) match { + case StepperShape.IntValue => new StepsIntVector (underlying.asInstanceOf[Vector[Int]], 0, underlying.length) + case StepperShape.LongValue => new StepsLongVector (underlying.asInstanceOf[Vector[Long]], 0, underlying.length) + case StepperShape.DoubleValue => new StepsDoubleVector(underlying.asInstanceOf[Vector[Double]], 0, underlying.length) + case _ => ss.parUnbox(new StepsAnyVector[T](underlying, 0, underlying.length)) + }).asInstanceOf[S with EfficientSubstep] +} diff --git a/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsWithTail.scala b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsWithTail.scala new file mode 100644 index 0000000..c87f798 --- /dev/null +++ b/src/main/scala-2.13-/scala/compat/java8/converterImpl/StepsWithTail.scala @@ -0,0 +1,98 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.converterImpl + +import scala.compat.java8.collectionImpl._ +import Stepper._ + +/** Abstracts all the generic operations of stepping over a collection with a fast tail operation. + * Because of how Java 8 streams subdivide their spliterators, we do this by chunking a bit at + * a time, generating a long chain of chunks. These won't necessarily all be the same size, + * but if there are enough hopefully it won't matter. + * + * Subclasses MUST decrement `maxN` when consuming elements, or this will not work! 
+ */ +private[java8] abstract class AbstractStepsWithTail[CC >: Null, Sub >: Null, Semi <: Sub](final protected var underlying: CC, final protected var maxN: Long) { + private var nextChunkSize: Int = 0 + protected def myIsEmpty(cc: CC): Boolean + protected def myTailOf(cc: CC): CC + def prepareParallelOperation(): this.type = { + if (maxN >= Int.MaxValue && nextChunkSize == 0) { + // Need parallel context to know whether to run this or not! + var u = underlying + var i = 0 + while (i < 1024 && !myIsEmpty(u)) { u = myTailOf(u); i += 1 } + if (i < 1024) maxN = i + else nextChunkSize = 64 // Guaranteed at least 16 chunks + } + this + } + def semiclone(chunk: Int): Semi + def characteristics(): Int = if (maxN < Int.MaxValue) Ordered | Sized | SubSized else Ordered + def estimateSize(): Long = if (maxN < Int.MaxValue) maxN else Long.MaxValue + def hasNext(): Boolean = if (maxN < Int.MaxValue) maxN > 0 else if (myIsEmpty(underlying)) { maxN = 0; false } else true + def substep(): Sub = { + prepareParallelOperation() + maxN match { + case x if x < 2 => null + case x if x >= Int.MaxValue => + var u = underlying + var i = 0 + val n = (nextChunkSize & 0xFFFFFFFC) // Use bottom two bits to count up + while (i < n && !myIsEmpty(u)) { + u = myTailOf(u) + i += 1 + } + if (myIsEmpty(u)) { + maxN = i + substep() // Different branch now, recursion is an easy way to get it + } + else { + val sub = semiclone(n) + underlying = u + if ((nextChunkSize & 3) == 3) nextChunkSize = if (n < 0x40000000) 2*n else n else nextChunkSize += 1 + sub + } + case x => + var half = x.toInt >>> 1 + val sub = semiclone(half) + maxN -= half + while (half > 0) { underlying = myTailOf(underlying); half -= 1 } + sub + } + } +} + +/** Abstracts the operation of stepping over a generic indexable collection */ +private[java8] abstract class StepsWithTail[A, CC >: Null, STA >: Null <: StepsWithTail[A, CC, _]](_underlying: CC, _maxN: Long) + extends AbstractStepsWithTail[CC, AnyStepper[A], STA](_underlying, 
_maxN) + with AnyStepper[A] +{} + +/** Abstracts the operation of stepping over an indexable collection of Doubles */ +private[java8] abstract class StepsDoubleWithTail[CC >: Null, STD >: Null <: StepsDoubleWithTail[CC, _]](_underlying: CC, _maxN: Long) + extends AbstractStepsWithTail[CC, DoubleStepper, STD](_underlying, _maxN) + with DoubleStepper +{} + +/** Abstracts the operation of stepping over an indexable collection of Ints */ +private[java8] abstract class StepsIntWithTail[CC >: Null, STI >: Null <: StepsIntWithTail[CC, _]](_underlying: CC, _maxN: Long) + extends AbstractStepsWithTail[CC, IntStepper, STI](_underlying, _maxN) + with IntStepper +{} + +/** Abstracts the operation of stepping over an indexable collection of Longs */ +private[java8] abstract class StepsLongWithTail[CC >: Null, STL >: Null <: StepsLongWithTail[CC, _]](_underlying: CC, _maxN: Long) + extends AbstractStepsWithTail[CC, LongStepper, STL](_underlying, _maxN) + with LongStepper +{} diff --git a/src/main/scala-2.13-/scala/concurrent/java8/FuturesConvertersImplCompat.scala b/src/main/scala-2.13-/scala/concurrent/java8/FuturesConvertersImplCompat.scala new file mode 100644 index 0000000..206e76f --- /dev/null +++ b/src/main/scala-2.13-/scala/concurrent/java8/FuturesConvertersImplCompat.scala @@ -0,0 +1,20 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.concurrent.java8 + +import scala.concurrent.Future + +// TODO: make this private[scala] when genjavadoc allows for that. 
+object FuturesConvertersImplCompat { + def InternalCallbackExecutor = Future.InternalCallbackExecutor +} diff --git a/src/main/scala/scala/compat/java8/DurationConverters.scala b/src/main/scala/scala/compat/java8/DurationConverters.scala new file mode 100644 index 0000000..55203b2 --- /dev/null +++ b/src/main/scala/scala/compat/java8/DurationConverters.scala @@ -0,0 +1,88 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +import java.time.temporal.ChronoUnit +import java.util.concurrent.TimeUnit +import java.time.{Duration => JavaDuration} + +import scala.concurrent.duration.{FiniteDuration, Duration => ScalaDuration} + + +/** + * This class contains static methods which convert between Java Durations + * and the durations from the Scala concurrency package. This is useful when mediating between Scala and Java + * libraries with asynchronous APIs where timeouts for example are often expressed as durations. + */ +object DurationConverters { + + /** + * Transform a Java duration into a Scala duration. If the nanosecond part of the Java duration is zero the returned + * duration will have a time unit of seconds and if there is a nanoseconds part the Scala duration will have a time + * unit of nanoseconds. + * + * @throws IllegalArgumentException If the given Java Duration is out of bounds of what can be expressed with the + * Scala FiniteDuration. 
+ */ + final def toScala(duration: java.time.Duration): scala.concurrent.duration.FiniteDuration = { + val originalSeconds = duration.getSeconds + val originalNanos = duration.getNano + if (originalNanos == 0) { + if (originalSeconds == 0) ScalaDuration.Zero + else FiniteDuration(originalSeconds, TimeUnit.SECONDS) + } else if (originalSeconds == 0) { + FiniteDuration(originalNanos, TimeUnit.NANOSECONDS) + } else { + try { + val secondsAsNanos = Math.multiplyExact(originalSeconds, 1000000000) + val totalNanos = secondsAsNanos + originalNanos + if ((totalNanos < 0 && secondsAsNanos < 0) || (totalNanos > 0 && secondsAsNanos > 0)) FiniteDuration(totalNanos, TimeUnit.NANOSECONDS) + else throw new ArithmeticException() + } catch { + case _: ArithmeticException => throw new IllegalArgumentException(s"Java duration $duration cannot be expressed as a Scala duration") + } + } + } + + /** + * Transform a Scala FiniteDuration into a Java duration. Note that the Scala duration keeps the time unit it was created + * with while a Java duration always is a pair of seconds and nanos, so the unit it lost. 
+ */ + final def toJava(duration: scala.concurrent.duration.FiniteDuration): java.time.Duration = { + if (duration.length == 0) JavaDuration.ZERO + else duration.unit match { + case TimeUnit.NANOSECONDS => JavaDuration.ofNanos(duration.length) + case TimeUnit.MICROSECONDS => JavaDuration.of(duration.length, ChronoUnit.MICROS) + case TimeUnit.MILLISECONDS => JavaDuration.ofMillis(duration.length) + case TimeUnit.SECONDS => JavaDuration.ofSeconds(duration.length) + case TimeUnit.MINUTES => JavaDuration.ofMinutes(duration.length) + case TimeUnit.HOURS => JavaDuration.ofHours(duration.length) + case TimeUnit.DAYS => JavaDuration.ofDays(duration.length) + } + } + + implicit final class DurationOps(val duration: java.time.Duration) extends AnyVal { + /** + * See [[DurationConverters#toScala]] + */ + def toScala: scala.concurrent.duration.FiniteDuration = DurationConverters.toScala(duration) + } + + implicit final class FiniteDurationops(val duration: scala.concurrent.duration.FiniteDuration) extends AnyVal { + /** + * See [[DurationConverters#toJava]] + */ + def toJava: java.time.Duration = DurationConverters.toJava(duration) + } + +} diff --git a/src/main/scala/scala/compat/java8/FutureConverters.scala b/src/main/scala/scala/compat/java8/FutureConverters.scala index 86fdddf..713d9e2 100644 --- a/src/main/scala/scala/compat/java8/FutureConverters.scala +++ b/src/main/scala/scala/compat/java8/FutureConverters.scala @@ -1,13 +1,24 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.compat.java8 +import scala.language.implicitConversions + import scala.concurrent.java8.FuturesConvertersImpl._ -import scala.concurrent.{ Future, Promise, ExecutionContext, ExecutionContextExecutorService, ExecutionContextExecutor, impl } -import java.util.concurrent.{ CompletionStage, Executor, ExecutorService, CompletableFuture } -import scala.util.{ Try, Success, Failure } -import java.util.function.{ BiConsumer, Function ⇒ JF, Consumer, BiFunction } +import scala.concurrent.java8.FuturesConvertersImplCompat._ +import scala.concurrent.{ Future, Promise, ExecutionContext, ExecutionContextExecutorService, ExecutionContextExecutor } +import java.util.concurrent.{ CompletionStage, Executor, ExecutorService } +import java.util.function.Consumer /** * This class contains static methods which convert between Java CompletionStage @@ -28,7 +39,7 @@ import java.util.function.{ BiConsumer, Function ⇒ JF, Consumer, BiFunction } * {{{ * import java.util.concurrent.CompletionStage; * import scala.concurrent.Future; - * import static scala.concurrent.java8.FutureConverter.*; + * import static scala.concurrent.java8.FutureConverters.*; * * final CompletionStage cs = ... 
// from an async Java API * final Future f = toScala(cs); @@ -54,10 +65,14 @@ object FutureConverters { * not support the CompletableFuture interface */ def toJava[T](f: Future[T]): CompletionStage[T] = { - val cf = new CF[T] - implicit val ec = InternalCallbackExecutor - f onComplete cf - cf + f match { + case p: P[T @unchecked] => p.wrapped + case _ => + val cf = new CF[T](f) + implicit val ec = InternalCallbackExecutor + f onComplete cf + cf + } } /** @@ -71,9 +86,13 @@ object FutureConverters { * @return a Scala Future that represents the CompletionStage's completion */ def toScala[T](cs: CompletionStage[T]): Future[T] = { - val p = new P[T] - cs whenComplete p - p.future + cs match { + case cf: CF[T] => cf.wrapped + case _ => + val p = new P[T](cs) + cs whenComplete p + p.future + } } /** diff --git a/src/main/scala/scala/compat/java8/OptionConverters.scala b/src/main/scala/scala/compat/java8/OptionConverters.scala index b99529a..44e4194 100644 --- a/src/main/scala/scala/compat/java8/OptionConverters.scala +++ b/src/main/scala/scala/compat/java8/OptionConverters.scala @@ -1,9 +1,17 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.compat.java8 -import language.implicitConversions import java.util.{Optional, OptionalDouble, OptionalInt, OptionalLong} /** This class enables bidirectional conversion between `scala.Option` and the @@ -50,30 +58,33 @@ object OptionConverters { /** Converts from `Option` to a manually specialized variant `That` */ def fromScala(o: Option[A]): That } - + /** Implementation of creation of `OptionalDouble` from `Option[Double]` or `Optional[Double]`*/ - implicit val specializer_OptionalDouble = new SpecializerOfOptions[Double, OptionalDouble] { - /** Creates an `OptionalDouble` from `Optional[Double]` */ - def fromJava(o: Optional[Double]): OptionalDouble = if (o.isPresent) OptionalDouble.of(o.get) else OptionalDouble.empty - /** Creates an `OptionalDouble` from `Option[Double]` */ - def fromScala(o: Option[Double]): OptionalDouble = o match { case Some(d) => OptionalDouble.of(d); case _ => OptionalDouble.empty } - } - + implicit val specializer_OptionalDouble: SpecializerOfOptions[Double, OptionalDouble] = + new SpecializerOfOptions[Double, OptionalDouble] { + /** Creates an `OptionalDouble` from `Optional[Double]` */ + def fromJava(o: Optional[Double]): OptionalDouble = if (o.isPresent) OptionalDouble.of(o.get) else OptionalDouble.empty + /** Creates an `OptionalDouble` from `Option[Double]` */ + def fromScala(o: Option[Double]): OptionalDouble = o match { case Some(d) => OptionalDouble.of(d); case _ => OptionalDouble.empty } + } + /** Implementation of creation of `OptionalInt` from `Option[Int]` or `Optional[Int]`*/ - implicit val specializer_OptionalInt = new SpecializerOfOptions[Int, OptionalInt] { - /** Creates an `OptionalInt` from `Optional[Int]` */ - def fromJava(o: Optional[Int]): OptionalInt = if (o.isPresent) OptionalInt.of(o.get) else OptionalInt.empty - /** Creates an `OptionalInt` from `Option[Int]` */ - def fromScala(o: Option[Int]): OptionalInt = o match { case Some(d) => OptionalInt.of(d); case _ => OptionalInt.empty } - } - + 
implicit val specializer_OptionalInt: SpecializerOfOptions[Int, OptionalInt] = + new SpecializerOfOptions[Int, OptionalInt] { + /** Creates an `OptionalInt` from `Optional[Int]` */ + def fromJava(o: Optional[Int]): OptionalInt = if (o.isPresent) OptionalInt.of(o.get) else OptionalInt.empty + /** Creates an `OptionalInt` from `Option[Int]` */ + def fromScala(o: Option[Int]): OptionalInt = o match { case Some(d) => OptionalInt.of(d); case _ => OptionalInt.empty } + } + /** Implementation of creation of `OptionalLong` from `Option[Long]` or `Optional[Long]`*/ - implicit val specializer_OptionalLong = new SpecializerOfOptions[Long, OptionalLong] { - /** Creates an `OptionalLong` from `Optional[Long]` */ - def fromJava(o: Optional[Long]): OptionalLong = if (o.isPresent) OptionalLong.of(o.get) else OptionalLong.empty - /** Creates an `OptionalLong` from `Option[Long]` */ - def fromScala(o: Option[Long]): OptionalLong = o match { case Some(d) => OptionalLong.of(d); case _ => OptionalLong.empty } - } + implicit val specializer_OptionalLong: SpecializerOfOptions[Long, OptionalLong] = + new SpecializerOfOptions[Long, OptionalLong] { + /** Creates an `OptionalLong` from `Optional[Long]` */ + def fromJava(o: Optional[Long]): OptionalLong = if (o.isPresent) OptionalLong.of(o.get) else OptionalLong.empty + /** Creates an `OptionalLong` from `Option[Long]` */ + def fromScala(o: Option[Long]): OptionalLong = o match { case Some(d) => OptionalLong.of(d); case _ => OptionalLong.empty } + } /** Provides conversions from `java.util.Optional` to Scala `Option` or primitive `java.util.Optional` types */ implicit class RichOptionalGeneric[A](val underlying: java.util.Optional[A]) extends AnyVal { @@ -82,7 +93,7 @@ object OptionConverters { /** Create a specialized primitive variant of this generic `Optional`, if an appropriate one exists */ def asPrimitive[That](implicit specOp: SpecializerOfOptions[A, That]): That = specOp.fromJava(underlying) } - + /** Provides conversions from 
`scala.Option` to Java `Optional` types, either generic or primitive */ implicit class RichOptionForJava8[A](val underlying: Option[A]) extends AnyVal { /** Create a `java.util.Optional` version of this `Option` (not specialized) */ @@ -90,7 +101,7 @@ object OptionConverters { /** Create a specialized primitive `java.util.Optional` type, if an appropriate one exists */ def asPrimitive[That](implicit specOp: SpecializerOfOptions[A, That]): That = specOp.fromScala(underlying) } - + /** Provides conversions from `java.util.OptionalDouble` to the generic `Optional` and Scala `Option` */ implicit class RichOptionalDouble(val underlying: OptionalDouble) extends AnyVal { /** Create a `scala.Option` version of this `OptionalDouble` */ @@ -98,7 +109,7 @@ object OptionConverters { /** Create a generic `java.util.Optional` version of this `OptionalDouble` */ def asGeneric: Optional[Double] = if (underlying.isPresent) Optional.of(underlying.getAsDouble) else Optional.empty[Double] } - + /** Provides conversions from `java.util.OptionalInt` to the generic `Optional` and Scala `Option` */ implicit class RichOptionalInt(val underlying: OptionalInt) extends AnyVal { /** Create a `scala.Option` version of this `OptionalInt` */ @@ -106,7 +117,7 @@ object OptionConverters { /** Create a generic `java.util.Optional` version of this `OptionalInt` */ def asGeneric: Optional[Int] = if (underlying.isPresent) Optional.of(underlying.getAsInt) else Optional.empty[Int] } - + /** Provides conversions from `java.util.OptionalLong` to the generic `Optional` and Scala `Option` */ implicit class RichOptionalLong(val underlying: OptionalLong) extends AnyVal { /** Create a `scala.Option` version of this `OptionalLong` */ @@ -114,10 +125,10 @@ object OptionConverters { /** Create a generic `java.util.Optional` version of this `OptionalLong` */ def asGeneric: Optional[Long] = if (underlying.isPresent) Optional.of(underlying.getAsLong) else Optional.empty[Long] } - + /** Conversion from Scala `Option` 
to Java `Optional` without using implicits, for convenient use from Java. */ final def toJava[A](o: Option[A]): Optional[A] = o match { case Some(a) => Optional.ofNullable(a); case _ => Optional.empty[A] } - + /** Conversion from Java `Optional` to Scala `Option` without using implicits, for convenient use from Java */ final def toScala[A](o: Optional[A]): Option[A] = if (o.isPresent) Some(o.get) else None diff --git a/src/main/scala/scala/compat/java8/PrimitiveIteratorConversions.scala b/src/main/scala/scala/compat/java8/PrimitiveIteratorConversions.scala index 68d4394..a032a4f 100644 --- a/src/main/scala/scala/compat/java8/PrimitiveIteratorConversions.scala +++ b/src/main/scala/scala/compat/java8/PrimitiveIteratorConversions.scala @@ -1,9 +1,17 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.compat.java8 -import language.implicitConversions import java.util.{ Iterator => JIterator, PrimitiveIterator } /** This class enables conversion from `scala.Iterator` to the set of @@ -31,78 +39,81 @@ object PrimitiveIteratorConverters { /** Packages a Scala `Iterator` to a manually specialized Java variant `That` */ def fromScala(it: Iterator[A]): That } - + /** Implementation of wrapping of `java.util.Iterator[Double]` or `scala.collection.Iterator[Double]` as a `java.util.PrimitiveIterator.OfDouble` */ - implicit val specializer_PrimitiveIteratorDouble = new SpecializerOfIterators[Double, PrimitiveIterator.OfDouble] { - /** Packages a `java.util.Iterator[Double]` as a `java.util.PrimitiveIterator.OfDouble` */ - def fromJava(it: JIterator[Double]): PrimitiveIterator.OfDouble = - new wrappers.IteratorPrimitiveDoubleWrapper(it.asInstanceOf[JIterator[java.lang.Double]]) - - /** Packages a `scala.collection.Iterator[Double]` as a `java.util.PrimitiveIterator.OfDouble` */ - def fromScala(it: Iterator[Double]): PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { - def hasNext = it.hasNext - def next() = it.next() - def nextDouble() = it.next() - def remove() { throw new UnsupportedOperationException("remove on scala.collection.Iterator") } - def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Double]) { - while (it.hasNext) c.accept(it.next) - } - def forEachRemaining(c: java.util.function.DoubleConsumer) { - while (it.hasNext) c.accept(it.next) + implicit val specializer_PrimitiveIteratorDouble: SpecializerOfIterators[Double, PrimitiveIterator.OfDouble] = + new SpecializerOfIterators[Double, PrimitiveIterator.OfDouble] { + /** Packages a `java.util.Iterator[Double]` as a `java.util.PrimitiveIterator.OfDouble` */ + def fromJava(it: JIterator[Double]): PrimitiveIterator.OfDouble = + new wrappers.IteratorPrimitiveDoubleWrapper(it.asInstanceOf[JIterator[java.lang.Double]]) + + /** Packages a `scala.collection.Iterator[Double]` 
as a `java.util.PrimitiveIterator.OfDouble` */ + def fromScala(it: Iterator[Double]): PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def hasNext = it.hasNext + override def next() = it.next().asInstanceOf[java.lang.Double] + def nextDouble() = it.next() + override def remove(): Unit = { throw new UnsupportedOperationException("remove on scala.collection.Iterator") } + override def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Double]): Unit = { + while (it.hasNext) c.accept(it.next()) + } + override def forEachRemaining(c: java.util.function.DoubleConsumer): Unit = { + while (it.hasNext) c.accept(it.next()) + } } } - } - + /** Implementation of wrapping of `java.util.Iterator[Int]` or `scala.collection.Iterator[Int]` as a `java.util.PrimitiveIterator.OfInt` */ - implicit val specializer_PrimitiveIteratorInt = new SpecializerOfIterators[Int, PrimitiveIterator.OfInt] { - /** Packages a `java.util.Iterator[Int]` as a `java.util.PrimitiveIterator.OfInt` */ - def fromJava(it: JIterator[Int]): PrimitiveIterator.OfInt = - new wrappers.IteratorPrimitiveIntWrapper(it.asInstanceOf[JIterator[java.lang.Integer]]) - - /** Packages a `scala.collection.Iterator[Int]` as a `java.util.PrimitiveIterator.OfInt` */ - def fromScala(it: Iterator[Int]): PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { - def hasNext = it.hasNext - def next() = it.next() - def nextInt() = it.next() - def remove() { throw new UnsupportedOperationException("remove on scala.collection.Iterator") } - def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Integer]) { - while (it.hasNext) c.accept(it.next) - } - def forEachRemaining(c: java.util.function.IntConsumer) { - while (it.hasNext) c.accept(it.next) + implicit val specializer_PrimitiveIteratorInt: SpecializerOfIterators[Int, PrimitiveIterator.OfInt] = + new SpecializerOfIterators[Int, PrimitiveIterator.OfInt] { + /** Packages a `java.util.Iterator[Int]` as a `java.util.PrimitiveIterator.OfInt` */ + def 
fromJava(it: JIterator[Int]): PrimitiveIterator.OfInt = + new wrappers.IteratorPrimitiveIntWrapper(it.asInstanceOf[JIterator[java.lang.Integer]]) + + /** Packages a `scala.collection.Iterator[Int]` as a `java.util.PrimitiveIterator.OfInt` */ + def fromScala(it: Iterator[Int]): PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { + def hasNext = it.hasNext + override def next() = it.next().asInstanceOf[java.lang.Integer] + def nextInt() = it.next() + override def remove(): Unit = { throw new UnsupportedOperationException("remove on scala.collection.Iterator") } + override def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Integer]): Unit = { + while (it.hasNext) c.accept(it.next()) + } + override def forEachRemaining(c: java.util.function.IntConsumer): Unit = { + while (it.hasNext) c.accept(it.next()) + } } } - } - + /** Implementation of wrapping of `java.util.Iterator[Long]` or `scala.collection.Iterator[Long]` as a `java.util.PrimitiveIterator.OfLong` */ - implicit val specializer_PrimitiveIteratorLong = new SpecializerOfIterators[Long, PrimitiveIterator.OfLong] { - /** Packages a `java.util.Iterator[Long]` as a `java.util.PrimitiveIterator.OfLong` */ - def fromJava(it: JIterator[Long]): PrimitiveIterator.OfLong = - new wrappers.IteratorPrimitiveLongWrapper(it.asInstanceOf[JIterator[java.lang.Long]]) - - /** Packages a `scala.collection.Iterator[Long]` as a `java.util.PrimitiveIterator.OfLong` */ - def fromScala(it: Iterator[Long]): PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { - def hasNext = it.hasNext - def next() = it.next() - def nextLong() = it.next() - def remove() { throw new UnsupportedOperationException("remove on scala.collection.Iterator") } - def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Long]) { - while (it.hasNext) c.accept(it.next) - } - def forEachRemaining(c: java.util.function.LongConsumer) { - while (it.hasNext) c.accept(it.next) + implicit val specializer_PrimitiveIteratorLong: 
SpecializerOfIterators[Long, PrimitiveIterator.OfLong] = + new SpecializerOfIterators[Long, PrimitiveIterator.OfLong] { + /** Packages a `java.util.Iterator[Long]` as a `java.util.PrimitiveIterator.OfLong` */ + def fromJava(it: JIterator[Long]): PrimitiveIterator.OfLong = + new wrappers.IteratorPrimitiveLongWrapper(it.asInstanceOf[JIterator[java.lang.Long]]) + + /** Packages a `scala.collection.Iterator[Long]` as a `java.util.PrimitiveIterator.OfLong` */ + def fromScala(it: Iterator[Long]): PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def hasNext = it.hasNext + override def next() = it.next().asInstanceOf[java.lang.Long] + def nextLong() = it.next() + override def remove(): Unit = { throw new UnsupportedOperationException("remove on scala.collection.Iterator") } + override def forEachRemaining(c: java.util.function.Consumer[_ >: java.lang.Long]): Unit = { + while (it.hasNext) c.accept(it.next()) + } + override def forEachRemaining(c: java.util.function.LongConsumer): Unit = { + while (it.hasNext) c.accept(it.next()) + } } } - } - + /** Provides conversions from Java `Iterator` to manually specialized `PrimitiveIterator` variants, when available */ - implicit class RichJavaIteratorToPrimitives[A](val underlying: JIterator[A]) extends AnyVal { + implicit final class RichJavaIteratorToPrimitives[A](private val underlying: JIterator[A]) extends AnyVal { /** Wraps this `java.util.Iterator` as a manually specialized variant, if possible */ def asPrimitive[That](implicit specOp: SpecializerOfIterators[A, That]): That = specOp.fromJava(underlying) } - + /** Provides conversions from Scala `Iterator` to manually specialized `PrimitiveIterator` variants, when available */ - implicit class RichIteratorToPrimitives[A](val underlying: Iterator[A]) extends AnyVal { + implicit final class RichIteratorToPrimitives[A](private val underlying: Iterator[A]) extends AnyVal { /** Wraps this `scala.collection.Iterator` as a manually specialized 
`java.util.PrimitiveIterator` variant, if possible */ def asPrimitive[That](implicit specOp: SpecializerOfIterators[A, That]): That = specOp.fromScala(underlying) } diff --git a/src/main/scala/scala/compat/java8/WrapperTraits.scala b/src/main/scala/scala/compat/java8/WrapperTraits.scala new file mode 100644 index 0000000..8d861ba --- /dev/null +++ b/src/main/scala/scala/compat/java8/WrapperTraits.scala @@ -0,0 +1,25 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +/** A trait that indicates that the class is or can be converted to a Scala version by wrapping a Java class */ +trait WrappedAsScala[S] { + /** Returns an appropriate Scala version */ + def asScala: S +} + +/** A trait that indicates that the class is or can be converted to a Java version by wrapping a Scala class */ +trait WrappedAsJava[J] { + /** Returns an appropriate Java version */ + def asJava: J +} diff --git a/src/main/scala/scala/concurrent/java8/FutureConvertersImpl.scala b/src/main/scala/scala/concurrent/java8/FutureConvertersImpl.scala index 14a917f..9991acd 100644 --- a/src/main/scala/scala/concurrent/java8/FutureConvertersImpl.scala +++ b/src/main/scala/scala/concurrent/java8/FutureConvertersImpl.scala @@ -1,23 +1,32 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.concurrent.java8 // Located in this package to access private[concurrent] members -import scala.concurrent.{ Future, Promise, ExecutionContext, ExecutionContextExecutorService, ExecutionContextExecutor, impl } -import java.util.concurrent.{ CompletionStage, Executor, ExecutorService, CompletableFuture } -import scala.util.{ Try, Success, Failure } -import java.util.function.{ BiConsumer, Function ⇒ JF, Consumer, BiFunction } +import java.util.concurrent._ +import java.util.function.{BiConsumer, BiFunction, Consumer, Function => JF} -// TODO: make thie private[scala] when genjavadoc allows for that. -object FuturesConvertersImpl { - def InternalCallbackExecutor = Future.InternalCallbackExecutor +import scala.concurrent.Future +import scala.concurrent.impl.Promise.DefaultPromise +import scala.util.{Failure, Success, Try} - class CF[T] extends CompletableFuture[T] with (Try[T] => Unit) { +// TODO: make this private[scala] when genjavadoc allows for that. +object FuturesConvertersImpl { + class CF[T](val wrapped: Future[T]) extends CompletableFuture[T] with (Try[T] => Unit) { override def apply(t: Try[T]): Unit = t match { - case Success(v) ⇒ complete(v) - case Failure(e) ⇒ completeExceptionally(e) + case Success(v) => complete(v) + case Failure(e) => completeExceptionally(e) } /* @@ -57,7 +66,7 @@ object FuturesConvertersImpl { try { fn(e).asInstanceOf[AnyRef] } catch { - case thr: Throwable ⇒ cf.completeExceptionally(thr); this + case thr: Throwable => cf.completeExceptionally(thr); this } if (n ne this) cf.complete(n.asInstanceOf[T]) } @@ -66,13 +75,26 @@ object FuturesConvertersImpl { cf } - override def toCompletableFuture(): CompletableFuture[T] = - throw new UnsupportedOperationException("this CompletionStage represents a read-only Scala Future") + /** + * @inheritdoc + * + * WARNING: completing the result of this method will not complete the underlying + * Scala Future or Promise (ie, the one that that was passed to `toJava`.) 
+ */ + override def toCompletableFuture(): CompletableFuture[T] = this + + override def obtrudeValue(value: T): Unit = throw new UnsupportedOperationException("obtrudeValue may not be used on the result of toJava(scalaFuture)") + + override def obtrudeException(ex: Throwable): Unit = throw new UnsupportedOperationException("obtrudeException may not be used on the result of toJava(scalaFuture)") + + override def get(): T = scala.concurrent.blocking(super.get()) + + override def get(timeout: Long, unit: TimeUnit): T = scala.concurrent.blocking(super.get(timeout, unit)) override def toString: String = super[CompletableFuture].toString } - class P[T] extends impl.Promise.DefaultPromise[T] with BiConsumer[T, Throwable] { + class P[T](val wrapped: CompletionStage[T]) extends DefaultPromise[T] with BiConsumer[T, Throwable] { override def accept(v: T, e: Throwable): Unit = { if (e == null) complete(Success(v)) else complete(Failure(e)) diff --git a/src/test/java-2.11/scala/compat/java8/runtime/LambdaDeserializerTest.java b/src/test/java-2.11/scala/compat/java8/runtime/LambdaDeserializerTest.java new file mode 100644 index 0000000..a6b71d2 --- /dev/null +++ b/src/test/java-2.11/scala/compat/java8/runtime/LambdaDeserializerTest.java @@ -0,0 +1,218 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8.runtime; + +import org.junit.Assert; +import org.junit.Assume; +import org.junit.Test; + +import java.io.Serializable; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.SerializedLambda; +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.HashMap; + +public final class LambdaDeserializerTest { + private LambdaHost lambdaHost = new LambdaHost(); + + // We skip most tests on Java 17+ because of https://github.com/scala/bug/issues/12419 + // which we only fixed for 2.12+ + + @Test + public void serializationPrivate() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F1 f1 = lambdaHost.lambdaBackedByPrivateImplMethod(); + Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true)); + } + + @Test + public void serializationStatic() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F1 f1 = lambdaHost.lambdaBackedByStaticImplMethod(); + Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true)); + } + + @Test + public void serializationVirtualMethodReference() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F1 f1 = lambdaHost.lambdaBackedByVirtualMethodReference(); + Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true)); + } + + @Test + public void serializationInterfaceMethodReference() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F1 f1 = lambdaHost.lambdaBackedByInterfaceMethodReference(); + I i = new I() { + }; + Assert.assertEquals(f1.apply(i), reconstitute(f1).apply(i)); + } + + @Test + public void serializationStaticMethodReference() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F1 f1 = lambdaHost.lambdaBackedByStaticMethodReference(); + Assert.assertEquals(f1.apply(true), reconstitute(f1).apply(true)); + } + + @Test + public void serializationNewInvokeSpecial() { + 
Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F0 f1 = lambdaHost.lambdaBackedByConstructorCall(); + Assert.assertEquals(f1.apply(), reconstitute(f1).apply()); + } + + @Test + public void uncached() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + F0 f1 = lambdaHost.lambdaBackedByConstructorCall(); + F0 reconstituted1 = reconstitute(f1); + F0 reconstituted2 = reconstitute(f1); + Assert.assertNotEquals(reconstituted1.getClass(), reconstituted2.getClass()); + } + + @Test + public void cached() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + HashMap cache = new HashMap<>(); + F0 f1 = lambdaHost.lambdaBackedByConstructorCall(); + F0 reconstituted1 = reconstitute(f1, cache); + F0 reconstituted2 = reconstitute(f1, cache); + Assert.assertEquals(reconstituted1.getClass(), reconstituted2.getClass()); + } + + @Test + public void cachedStatic() { + Assume.assumeFalse(scala.util.Properties.isJavaAtLeast("17")); + HashMap cache = new HashMap<>(); + F1 f1 = lambdaHost.lambdaBackedByStaticImplMethod(); + // Check that deserialization of a static lambda always returns the + // same instance. + Assert.assertSame(reconstitute(f1, cache), reconstitute(f1, cache)); + + // (as is the case with regular invocation.) 
+ Assert.assertSame(f1, lambdaHost.lambdaBackedByStaticImplMethod()); + } + + @Test + public void implMethodNameChanged() { + F1 f1 = lambdaHost.lambdaBackedByStaticImplMethod(); + SerializedLambda sl = writeReplace(f1); + checkIllegalAccess(copySerializedLambda(sl, sl.getImplMethodName() + "___", sl.getImplMethodSignature())); + } + + @Test + public void implMethodSignatureChanged() { + F1 f1 = lambdaHost.lambdaBackedByStaticImplMethod(); + SerializedLambda sl = writeReplace(f1); + checkIllegalAccess(copySerializedLambda(sl, sl.getImplMethodName(), sl.getImplMethodSignature().replace("Boolean", "Integer"))); + } + + private void checkIllegalAccess(SerializedLambda serialized) { + try { + LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, serialized); + throw new AssertionError(); + } catch (IllegalArgumentException iae) { + if (!iae.getMessage().contains("Illegal lambda deserialization")) { + Assert.fail("Unexpected message: " + iae.getMessage()); + } + } + } + + private SerializedLambda copySerializedLambda(SerializedLambda sl, String implMethodName, String implMethodSignature) { + Object[] captures = new Object[sl.getCapturedArgCount()]; + for (int i = 0; i < captures.length; i++) { + captures[i] = sl.getCapturedArg(i); + } + return new SerializedLambda(loadClass(sl.getCapturingClass()), sl.getFunctionalInterfaceClass(), sl.getFunctionalInterfaceMethodName(), + sl.getFunctionalInterfaceMethodSignature(), sl.getImplMethodKind(), sl.getImplClass(), implMethodName, implMethodSignature, + sl.getInstantiatedMethodType(), captures); + } + + private Class loadClass(String className) { + try { + return Class.forName(className.replace('/', '.')); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + private A reconstitute(A f1) { + return reconstitute(f1, null); + } + + @SuppressWarnings("unchecked") + private A reconstitute(A f1, java.util.HashMap cache) { + try { + return (A) 
LambdaDeserializer.deserializeLambda(LambdaHost.lookup(), cache, writeReplace(f1)); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private SerializedLambda writeReplace(A f1) { + try { + Method writeReplace = f1.getClass().getDeclaredMethod("writeReplace"); + writeReplace.setAccessible(true); + return (SerializedLambda) writeReplace.invoke(f1); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} + + +interface F1 extends Serializable { + B apply(A a); +} + +interface F0 extends Serializable { + A apply(); +} + +class LambdaHost { + public F1 lambdaBackedByPrivateImplMethod() { + int local = 42; + return (b) -> Arrays.asList(local, b ? "true" : "false", LambdaHost.this).toString(); + } + + @SuppressWarnings("Convert2MethodRef") + public F1 lambdaBackedByStaticImplMethod() { + return (b) -> String.valueOf(b); + } + + public F1 lambdaBackedByStaticMethodReference() { + return String::valueOf; + } + + public F1 lambdaBackedByVirtualMethodReference() { + return Object::toString; + } + + public F1 lambdaBackedByInterfaceMethodReference() { + return I::i; + } + + public F0 lambdaBackedByConstructorCall() { + return String::new; + } + + public static MethodHandles.Lookup lookup() { + return MethodHandles.lookup(); + } +} + +interface I { + default String i() { return "i"; }; +} diff --git a/src/test/java/scala/compat/java8/BoxingTest.java b/src/test/java/scala/compat/java8/BoxingTest.java index 7074882..7f798ff 100644 --- a/src/test/java/scala/compat/java8/BoxingTest.java +++ b/src/test/java/scala/compat/java8/BoxingTest.java @@ -1,31 +1,43 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.compat.java8; import org.junit.Test; +import scala.runtime.java8.*; public class BoxingTest { @Test public void nullBoxesInterpretedAsZeroF1() { - JFunction1 jFunction1 = new JFunction1$mcII$sp() { + Object o = new JFunction1$mcII$sp() { @Override public int apply$mcII$sp(int v1) { return v1 + 1; } }; + scala.Function1 jFunction1 = (scala.Function1)o; Integer result = (Integer) jFunction1.apply(null); assert (result.intValue() == 1); } @Test public void nullBoxesInterpretedAsZeroF2() { - JFunction2 jFunction2 = new JFunction2$mcIII$sp() { + Object o = new JFunction2$mcIII$sp() { @Override public int apply$mcIII$sp(int v1, int v2) { return v1 + v2 + 1; } }; + scala.Function2 jFunction2 = (scala.Function2)o; Integer result = (Integer) jFunction2.apply(null, null); assert (result.intValue() == 1); } diff --git a/src/test/java/scala/compat/java8/DurationConvertersJavaTest.java b/src/test/java/scala/compat/java8/DurationConvertersJavaTest.java new file mode 100644 index 0000000..7d214a9 --- /dev/null +++ b/src/test/java/scala/compat/java8/DurationConvertersJavaTest.java @@ -0,0 +1,31 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8; + +import org.junit.Test; +import scala.concurrent.duration.FiniteDuration; +import scala.runtime.java8.*; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.TimeUnit; + +public class DurationConvertersJavaTest { + + @Test + public void apiAccessibleFromJava() { + DurationConverters.toScala(Duration.of(5, ChronoUnit.SECONDS)); + DurationConverters.toJava(FiniteDuration.create(5, TimeUnit.SECONDS)); + } + +} \ No newline at end of file diff --git a/src/test/java/scala/compat/java8/FutureConvertersTest.java b/src/test/java/scala/compat/java8/FutureConvertersTest.java index d7fc1f2..e3e492d 100644 --- a/src/test/java/scala/compat/java8/FutureConvertersTest.java +++ b/src/test/java/scala/compat/java8/FutureConvertersTest.java @@ -1,323 +1,429 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ -package scala.compat.java8; - -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import static java.util.concurrent.TimeUnit.*; -import static org.junit.Assert.*; +package scala.compat.java8; import org.junit.Test; - import scala.concurrent.Future; import scala.concurrent.Promise; + +import java.util.concurrent.*; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.junit.Assert.*; +import static org.junit.Assert.assertSame; import static scala.compat.java8.FutureConverters.*; public class FutureConvertersTest { - @Test - public void testToScalaSuccess() { - final CompletableFuture cs = new CompletableFuture<>(); - final Future f = toScala(cs); - assertFalse("f must not yet be completed", f.isCompleted()); - cs.complete("Hello"); - assertTrue("f must be completed by now", f.isCompleted()); - assertEquals("Hello", f.value().get().get()); - } - - @Test - public void testToScalaFailure() { - final CompletableFuture cs = new CompletableFuture<>(); - final Future f = toScala(cs); - assertFalse("f must not yet be completed", f.isCompleted()); - final Exception ex = new RuntimeException("Hello"); - cs.completeExceptionally(ex); - assertTrue("f must be completed by now", f.isCompleted()); - assertEquals(ex, f.value().get().failed().get()); - } - - @Test - public void testToJavaSuccess() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletableFuture cp = (CompletableFuture) cs; - assertFalse("cs must not yet be completed", cp.isDone()); - p.success("Hello"); - assertTrue("cs must be completed by now", cp.isDone()); - assertEquals("Hello", cp.get()); - } - - @Test - public void testToJavaFailure() throws InterruptedException, - ExecutionException { - final 
Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletableFuture cp = (CompletableFuture) cs; - assertFalse("cs must not yet be completed", cp.isDone()); - final Exception ex = new RuntimeException("Hello"); - p.failure(ex); - assertTrue("cs must be completed by now", cp.isDone()); - assertEquals("exceptionally equals", ex.toString(), cp.exceptionally(x -> x.toString()).get()); - Throwable thr = null; - try { - cp.get(); - } catch (Throwable t) { - thr = t; + @Test + public void testToScalaSuccess() { + final CompletableFuture cs = new CompletableFuture<>(); + final Future f = toScala(cs); + assertFalse("f must not yet be completed", f.isCompleted()); + cs.complete("Hello"); + assertTrue("f must be completed by now", f.isCompleted()); + assertEquals("Hello", f.value().get().get()); } - assertNotNull("get() must throw", thr); - assertEquals("thrown exception must be wrapped", ExecutionException.class, thr.getClass()); - assertEquals("wrapper must contain the right exception", ex, thr.getCause()); - } - - @Test - public void testToJavaThenApply() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.thenApply(x -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - return x; - }); - p.success("Hello"); - latch.countDown(); - assertEquals("Hello", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaThenAccept() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.thenAccept(x -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new 
RuntimeException(e); - } - }); - p.success("Hello"); - latch.countDown(); - assertNull("result must be Void", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaThenRun() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.thenRun(() -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - p.success("Hello"); - latch.countDown(); - assertNull("result must be Void", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaThenCombine() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletionStage other = CompletableFuture.completedFuture(42); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.thenCombine(other, (x, y) -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - return x.length() + y; - }); - p.success("Hello"); - latch.countDown(); - assertEquals((Integer) 47, second.toCompletableFuture().get()); - } - - @Test - public void testToJavaThenAcceptBoth() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletionStage other = CompletableFuture.completedFuture(42); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.thenAcceptBoth(other, (x, y) -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - p.success("Hello"); - latch.countDown(); - assertNull("result must be Void", second.toCompletableFuture().get()); - } - - @Test - public void 
testToJavaRunAfterBoth() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletionStage other = CompletableFuture.completedFuture(42); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.runAfterBoth(other, () -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - p.success("Hello"); - latch.countDown(); - assertNull("result must be Void", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaApplyToEither() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletionStage other = new CompletableFuture<>(); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.applyToEither(other, x -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - return x.length(); - }); - p.success("Hello"); - latch.countDown(); - assertEquals((Integer) 5, second.toCompletableFuture().get()); - } - - @Test - public void testToJavaAcceptEither() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CompletionStage other = new CompletableFuture<>(); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.acceptEither(other, x -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - p.success("Hello"); - latch.countDown(); - assertNull("result must be Void", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaRunAfterEither() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final 
CompletionStage cs = toJava(p.future()); - final CompletionStage other = new CompletableFuture<>(); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.runAfterEither(other, () -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - p.success("Hello"); - latch.countDown(); - assertNull("result must be Void", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaThenCompose() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.thenCompose(x -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - return CompletableFuture.completedFuture(x); - }); - p.success("Hello"); - latch.countDown(); - assertEquals("Hello", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaWhenComplete() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.whenComplete((v, e) -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - }); - p.success("Hello"); - latch.countDown(); - assertEquals("Hello", second.toCompletableFuture().get()); - } - - @Test - public void testToJavaHandle() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.handle((v, e) -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception ex) { - throw new 
RuntimeException(ex); - } - return v.length(); - }); - p.success("Hello"); - latch.countDown(); - assertEquals((Integer) 5, second.toCompletableFuture().get()); - } - - @Test - public void testToJavaExceptionally() throws InterruptedException, - ExecutionException { - final Promise p = promise(); - final CompletionStage cs = toJava(p.future()); - final CountDownLatch latch = new CountDownLatch(1); - final CompletionStage second = cs.exceptionally(e -> { - try { - assertTrue("latch must succeed", latch.await(1, SECONDS)); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - return e.getMessage(); - }); - p.failure(new RuntimeException("Hello")); - latch.countDown(); - assertEquals("Hello", second.toCompletableFuture().get()); - } + @Test + public void testToScalaFailure() { + final CompletableFuture cs = new CompletableFuture<>(); + final Future f = toScala(cs); + assertFalse("f must not yet be completed", f.isCompleted()); + final Exception ex = new RuntimeException("Hello"); + cs.completeExceptionally(ex); + assertTrue("f must be completed by now", f.isCompleted()); + assertEquals(ex, f.value().get().failed().get()); + } + + @Test + public void testToJavaSuccess() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletableFuture cp = (CompletableFuture) cs; + assertFalse("cs must not yet be completed", cp.isDone()); + p.success("Hello"); + assertTrue("cs must be completed by now", cp.isDone()); + assertEquals("Hello", cp.get()); + } + + @Test + public void testToJavaFailure() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletableFuture cp = (CompletableFuture) cs; + assertFalse("cs must not yet be completed", cp.isDone()); + final Exception ex = new RuntimeException("Hello"); + p.failure(ex); + assertTrue("cs must be completed by now", cp.isDone()); + 
assertEquals("exceptionally equals", ex.toString(), cp.exceptionally(x -> x.toString()).get()); + Throwable thr = null; + try { + cp.get(); + } catch (Throwable t) { + thr = t; + } + assertNotNull("get() must throw", thr); + assertEquals("thrown exception must be wrapped", ExecutionException.class, thr.getClass()); + assertEquals("wrapper must contain the right exception", ex, thr.getCause()); + } + + @Test + public void testToJavaThenApply() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.thenApply(x -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + return x; + }); + p.success("Hello"); + latch.countDown(); + assertEquals("Hello", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaThenAccept() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.thenAccept(x -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + p.success("Hello"); + latch.countDown(); + assertNull("result must be Void", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaThenRun() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.thenRun(() -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + p.success("Hello"); + latch.countDown(); + assertNull("result must be Void", 
second.toCompletableFuture().get()); + } + + @Test + public void testToJavaThenCombine() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletionStage other = CompletableFuture.completedFuture(42); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.thenCombine(other, (x, y) -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + return x.length() + y; + }); + p.success("Hello"); + latch.countDown(); + assertEquals((Integer) 47, second.toCompletableFuture().get()); + } + + @Test + public void testToJavaThenAcceptBoth() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletionStage other = CompletableFuture.completedFuture(42); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.thenAcceptBoth(other, (x, y) -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + p.success("Hello"); + latch.countDown(); + assertNull("result must be Void", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaRunAfterBoth() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletionStage other = CompletableFuture.completedFuture(42); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.runAfterBoth(other, () -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + p.success("Hello"); + latch.countDown(); + assertNull("result must be Void", second.toCompletableFuture().get()); + } + + @Test + public void 
testToJavaApplyToEither() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletionStage other = new CompletableFuture<>(); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.applyToEither(other, x -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + return x.length(); + }); + p.success("Hello"); + latch.countDown(); + assertEquals((Integer) 5, second.toCompletableFuture().get()); + } + + @Test + public void testToJavaAcceptEither() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletionStage other = new CompletableFuture<>(); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.acceptEither(other, x -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + p.success("Hello"); + latch.countDown(); + assertNull("result must be Void", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaRunAfterEither() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CompletionStage other = new CompletableFuture<>(); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.runAfterEither(other, () -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + p.success("Hello"); + latch.countDown(); + assertNull("result must be Void", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaThenCompose() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs 
= toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.thenCompose(x -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + return CompletableFuture.completedFuture(x); + }); + p.success("Hello"); + latch.countDown(); + assertEquals("Hello", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaWhenComplete() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.whenComplete((v, e) -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception ex) { + throw new RuntimeException(ex); + } + }); + p.success("Hello"); + latch.countDown(); + assertEquals("Hello", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaHandle() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.handle((v, e) -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception ex) { + throw new RuntimeException(ex); + } + return v.length(); + }); + p.success("Hello"); + latch.countDown(); + assertEquals((Integer) 5, second.toCompletableFuture().get()); + } + + @Test + public void testToJavaExceptionally() throws InterruptedException, + ExecutionException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + final CountDownLatch latch = new CountDownLatch(1); + final CompletionStage second = cs.exceptionally(e -> { + try { + assertTrue("latch must succeed", latch.await(1, SECONDS)); + } catch (Exception ex) { + throw new RuntimeException(ex); + } + return e.getMessage(); + }); + p.failure(new 
RuntimeException("Hello")); + latch.countDown(); + assertEquals("Hello", second.toCompletableFuture().get()); + } + + @Test + public void testToJavaThenComposeWithToJavaThenAccept() throws InterruptedException, + ExecutionException, TimeoutException { + // Test case from https://github.com/scala/scala-java8-compat/issues/29 + final Promise p1 = promise(); + final CompletableFuture future = new CompletableFuture<>(); + + CompletableFuture.supplyAsync(() -> "Hello"). + thenCompose(x -> toJava(p1.future())).handle((x, t) -> future.complete(x)); + p1.success("Hello"); + assertEquals("Hello", future.get(1000, MILLISECONDS)); + } + + @Test + public void testToJavaToCompletableFuture() throws ExecutionException, InterruptedException { + final Promise p = promise(); + final CompletionStage cs = toJava(p.future()); + CompletableFuture cf = cs.toCompletableFuture(); + assertEquals("notyet", cf.getNow("notyet")); + p.success("done"); + assertEquals("done", cf.get()); + } + + @Test + public void testToJavaToCompletableFutureDoesNotMutateUnderlyingPromise() throws ExecutionException, InterruptedException { + final Promise p = promise(); + Future sf = p.future(); + final CompletionStage cs = toJava(sf); + CompletableFuture cf = cs.toCompletableFuture(); + assertEquals("notyet", cf.getNow("notyet")); + cf.complete("done"); + assertEquals("done", cf.get()); + assertFalse(sf.isCompleted()); + assertFalse(p.isCompleted()); + } + + @Test + public void testToJavaToCompletableFutureJavaCompleteCalledAfterScalaComplete() throws ExecutionException, InterruptedException { + final Promise p = promise(); + Future sf = p.future(); + final CompletionStage cs = toJava(sf); + CompletableFuture cf = cs.toCompletableFuture(); + assertEquals("notyet", cf.getNow("notyet")); + p.success("scaladone"); + assertEquals("scaladone", cf.get()); + cf.complete("javadone"); + assertEquals("scaladone", cf.get()); + } + + @Test + public void testToJavaToCompletableFutureJavaCompleteCalledBeforeScalaComplete() 
throws ExecutionException, InterruptedException { + final Promise p = promise(); + Future sf = p.future(); + final CompletionStage cs = toJava(sf); + CompletableFuture cf = cs.toCompletableFuture(); + assertEquals("notyet", cf.getNow("notyet")); + cf.complete("javadone"); + assertEquals("javadone", cf.get()); + p.success("scaladone"); + assertEquals("javadone", cf.get()); + } + + @Test + public void testToJavaToCompletableFutureJavaObtrudeCalledBeforeScalaComplete() throws ExecutionException, InterruptedException { + final Promise p = promise(); + Future sf = p.future(); + final CompletionStage cs = toJava(sf); + CompletableFuture cf = cs.toCompletableFuture(); + try { + cf.obtrudeValue(""); + fail(); + } catch (UnsupportedOperationException iae) { + // okay + } + try { + cf.obtrudeException(new Exception()); + fail(); + } catch (UnsupportedOperationException iae) { + // okay + } + } + + @Test + public void testToJavaAndBackAvoidsWrappers() { + final Promise p = promise(); + final Future sf = p.future(); + final CompletionStage cs = toJava(sf); + Future sf1 = toScala(cs); + assertSame(sf, sf1); + } + + @Test + public void testToScalaAndBackAvoidsWrappers() { + final CompletableFuture cf = new CompletableFuture<>(); + final Future f = toScala(cf); + CompletionStage cs1 = toJava(f); + assertSame(cf, cs1); + + } } diff --git a/src/test/java/scala/compat/java8/LambdaTest.java b/src/test/java/scala/compat/java8/LambdaTest.java index c626083..5127c18 100644 --- a/src/test/java/scala/compat/java8/LambdaTest.java +++ b/src/test/java/scala/compat/java8/LambdaTest.java @@ -1,17 +1,33 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.compat.java8; +import org.apache.commons.lang3.SerializationUtils; import scala.runtime.*; + +import static junit.framework.Assert.assertEquals; import static scala.compat.java8.JFunction.*; import static scala.compat.java8.TestAPI.*; import org.junit.Test; + public class LambdaTest { @Test public void lambdaDemo() { + // Scala 2.12+ only: + //scala.Function1 f1 = (String s) -> s; + // Not allowed with Scala 2.10 nor 2.11 // "incompatible types: Function1 is not a functional interface" // scala.Function1 f = (String s) -> s; @@ -25,6 +41,9 @@ public void lambdaDemo() { // That's a pity, but we can get pretty close with this library! // We have to tell javac to use `JFunction1` as the functional interface. + // Scala 2.12 does not have or need JFunction anymore. We provide it as a + // deprecated stub for backwards compatibility. Use `scala.Function1` for + // code that targets Scala 2.12+ exclusively. JFunction1 f1 = (String s) -> s; // That's more or less equivalent to the old, anonymous class syntax: @@ -54,20 +73,29 @@ public void lambdaDemo() { // f1.apply(""); - // Specialized variants of the `apply` method are implenented in the - // functional interface + // Specialized variants of the `apply` method are provided but implementing a specialized + // Scala function in this straight-forward way results in boxing and unboxing because the + // Java lambda operates on boxed types: JFunction1 f5 = (i) -> -i; assert(f5.apply(1) == -1); assert(f5.apply$mcII$sp(1) == -1); + // We provide `JFunction.funcSpecialized` and `JFunction.procSpecialized` methods to avoid + // boxing: + scala.Function1 f5b = funcSpecialized((int i) -> -i); + assert(f5b.apply(1) == -1); + assert(f5b.apply$mcII$sp(1) == -1); + // as are `curried`, `tupled`, `compose`, `andThen`. 
f3.compose(f3).andThen(f3).apply(""); scala.Function2 f6 = func((s1, s2) -> join(s1, s2)); assert(f6.curried().apply("1").apply("2").equals("12")); - // Functions returning unit must use the `JProcedure1`, ... functional interfaces - // in order to convert a void lamdba return to Scala's Unit. - // + // Functions returning unit can use the `JProcedure1`, ... functional interfaces + // in order to convert a void lambda return to Scala's Unit: + JProcedure1 f7b = s -> sideEffect(); + scala.Function1 f7c = f7b; + // The easiest way to do this is via `JFunction.proc`, .... // // Note that the lambda has a return type of `void` if the last @@ -95,8 +123,23 @@ public void lambdaDemo() { acceptFunction22Unit( proc((v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, v17, v18, v19, v20, v21, v22) -> {v1.toUpperCase(); return;})); } + @Test + public void isSerializable() { + JFunction0 f0 = () -> "foo"; + assertEquals("foo", SerializationUtils.clone(f0).apply()); + + JFunction1 f1 = (a) -> a.toUpperCase(); + assertEquals("FOO", SerializationUtils.clone(f1).apply("foo")); + + JFunction2 f2 = (a, b) -> a + b; + assertEquals("foobar", SerializationUtils.clone(f2).apply("foo", "bar")); + + JFunction3 f3 = (a, b, c) -> a + b + c; + assertEquals("foobarbaz", SerializationUtils.clone(f3).apply("foo", "bar", "baz")); + } + + private static scala.concurrent.Future futureExample( - scala.concurrent.Future future, scala.concurrent.ExecutionContext ec) { + scala.concurrent.Future future, scala.concurrent.ExecutionContext ec) { return future.map(func(s -> s.toUpperCase()), ec).map(func(s -> s.length()), ec); } diff --git a/src/test/java/scala/compat/java8/OptionConvertersTest.scala b/src/test/java/scala/compat/java8/OptionConvertersTest.scala index 0b857c3..da99587 100644 --- a/src/test/java/scala/compat/java8/OptionConvertersTest.scala +++ b/src/test/java/scala/compat/java8/OptionConvertersTest.scala @@ -1,6 +1,15 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc.
+ * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.compat.java8 import org.junit.Test @@ -11,7 +20,7 @@ class OptionConvertersTest { import OptionConverters._ @Test - def scalaToEverything() { + def scalaToEverything(): Unit = { val o = Option("fish") val n = (None: Option[String]) val od = Option(2.7) @@ -37,7 +46,7 @@ class OptionConvertersTest { } @Test - def javaGenericToEverything() { + def javaGenericToEverything(): Unit = { val o = Optional.of("fish") val n = Optional.empty[String] val od = Optional.of(2.7) @@ -63,7 +72,7 @@ class OptionConvertersTest { } @Test - def javaOptionalDoubleToEverything() { + def javaOptionalDoubleToEverything(): Unit = { val o = OptionalDouble.of(2.7) val n = OptionalDouble.empty assertEquals(o.asScala, Option(o.getAsDouble)) @@ -73,7 +82,7 @@ class OptionConvertersTest { } @Test - def javaOptionalIntToEverything() { + def javaOptionalIntToEverything(): Unit = { val o = OptionalInt.of(4) val n = OptionalInt.empty assertEquals(o.asScala, Option(o.getAsInt)) @@ -83,7 +92,7 @@ class OptionConvertersTest { } @Test - def javaOptionalLongToEverything() { + def javaOptionalLongToEverything(): Unit = { val o = OptionalLong.of(-1) val n = OptionalLong.empty assertEquals(o.asScala, Option(o.getAsLong)) @@ -93,7 +102,7 @@ class OptionConvertersTest { } @Test - def nonExtensionConversions() { + def nonExtensionConversions(): Unit = { assertEquals(toScala(Optional.of("fish")), Option("fish")) assertEquals(toScala(Optional.empty[String]), None) assertEquals(toJava(Option("fish")), Optional.of("fish")) diff --git a/src/test/java/scala/compat/java8/SpecializedFactoryTest.java b/src/test/java/scala/compat/java8/SpecializedFactoryTest.java deleted file mode 100644 index 2e83412..0000000 --- 
a/src/test/java/scala/compat/java8/SpecializedFactoryTest.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ -package scala.compat.java8; - -import org.junit.Test; -import scala.runtime.BoxedUnit; - -public class SpecializedFactoryTest { - @Test public void intIntFunction() { - scala.Function1 f1 = JFunction.funcSpecialized((int x) -> x); - assert(f1 instanceof JFunction1$mcII$sp); - - scala.Function1 f2 = JFunction.procSpecialized((int x) -> System.out.print("")); - assert(f2 instanceof JFunction1$mcVI$sp); - - scala.Function0 f3 = JFunction.procSpecialized(() -> System.out.print("")); - assert (f3 instanceof JFunction0$mcV$sp); - } -} diff --git a/src/test/java/scala/compat/java8/SpecializedTest.scala b/src/test/java/scala/compat/java8/SpecializedTest.scala index 564ef76..135c81f 100644 --- a/src/test/java/scala/compat/java8/SpecializedTest.scala +++ b/src/test/java/scala/compat/java8/SpecializedTest.scala @@ -1,13 +1,22 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.compat.java8 import org.junit.Test -import scala.compat.java8.SpecializedTestSupport.IntIdentity +import SpecializedTestSupport.IntIdentity class SpecializedTest { - @Test def specializationWorks() { + @Test def specializationWorks(): Unit = { val intIdentity: (Int => Int) = new IntIdentity().asInstanceOf[Int => Int] intIdentity(24) // this function checks that it was called via the specialized apply variant. 
} diff --git a/src/test/java/scala/compat/java8/SpecializedTestSupport.java b/src/test/java/scala/compat/java8/SpecializedTestSupport.java index 8309f26..51f9cd2 100644 --- a/src/test/java/scala/compat/java8/SpecializedTestSupport.java +++ b/src/test/java/scala/compat/java8/SpecializedTestSupport.java @@ -1,11 +1,21 @@ /* - * Copyright (C) 2012-2015 Typesafe Inc. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.compat.java8; import java.util.Arrays; import java.util.List; import org.junit.Assert; +import scala.runtime.java8.*; public class SpecializedTestSupport { public static class IntIdentity implements JFunction1$mcII$sp { diff --git a/src/test/java/scala/compat/java8/StreamConvertersExampleTest.java b/src/test/java/scala/compat/java8/StreamConvertersExampleTest.java new file mode 100644 index 0000000..3a95f03 --- /dev/null +++ b/src/test/java/scala/compat/java8/StreamConvertersExampleTest.java @@ -0,0 +1,28 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +import org.junit.Test; + +import scala.collection.mutable.ArrayBuffer; +import scala.compat.java8.ScalaStreamSupport; + + +public class StreamConvertersExampleTest { + @Test + public void MakeAndUseArrayBuffer() { + ArrayBuffer ab = new ArrayBuffer(); + ab.$plus$eq("salmon"); + ab.$plus$eq("herring"); + assert( ScalaStreamSupport.stream(ab).mapToInt(x -> x.length()).sum() == 13 ); + } +} + diff --git a/src/test/scala-2.13+/scala/compat/java8/StepConvertersTest.scala b/src/test/scala-2.13+/scala/compat/java8/StepConvertersTest.scala new file mode 100644 index 0000000..667e13c --- /dev/null +++ b/src/test/scala-2.13+/scala/compat/java8/StepConvertersTest.scala @@ -0,0 +1,565 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +import org.junit.Test +import org.junit.Assert._ + +class StepConvertersTest { + import collectionImpl._ + import converterImpl._ + import StreamConverters._ // Includes StepConverters! 
+ import scala.{ collection => co } + import collection.{ mutable => cm, immutable => ci, concurrent => cc } + + def isAcc[X](x: X): Boolean = x.getClass.getSimpleName.contains("AccumulatorStepper") + + trait SpecCheck { + def check[X](x: X): Boolean + def msg[X](x: X): String + def assert(x: Any): Unit = + if(!check(x)) assertTrue(msg(x), false) + } + object SpecCheck { + def apply(f: Any => Boolean, err: Any => String = (_ => "SpecCheck failed")) = new SpecCheck { + def check[X](x: X): Boolean = f(x) + def msg[X](x: X): String = err(x) + } + } + + def _eh_[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + } + + def IFFY[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(isAcc(x)) + } + + def Okay[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(!isAcc(x)) + } + + def Fine[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(!isAcc(x)) + } + + def good[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(!isAcc(x)) + } + + def Tell[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + println(x.getClass.getName + " -> " + isAcc(x)) + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + } + + @Test + def comprehensivelyGeneric(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[AnyStepper[_]]) + + // Collection section + Okay( co.Iterator[String]("salmon").buffered.stepper ) + good( co.IndexedSeq[String]("salmon").stepper ) + Okay( co.Iterable[String]("salmon").stepper ) + Okay( co.Iterable[String]("salmon").view.stepper ) + Okay( co.Iterator[String]("salmon").stepper ) + Okay( co.LinearSeq[String]("salmon").stepper ) + Okay( co.Map[String, 
String]("fish" -> "salmon").stepper ) + Okay( co.Map[String, String]("fish" -> "salmon").keyStepper ) + Okay( co.Map[String, String]("fish" -> "salmon").valueStepper ) + Okay( co.Seq[String]("salmon").stepper ) + Okay( co.Seq[String]("salmon").view.stepper ) + Okay( co.Set[String]("salmon").stepper ) + Okay( co.SortedMap[String, String]("fish" -> "salmon").stepper ) + Okay( co.SortedMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( co.SortedMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( co.SortedSet[String]("salmon").stepper ) + IFFY( co.Iterable[String]("salmon").accumulate.stepper ) + IFFY( (co.Iterator[String]("salmon"): co.IterableOnce[String]).accumulate.stepper ) + IFFY( co.Iterable[String]("salmon").view.accumulate.stepper ) + + // Immutable section + Okay( ci.::("salmon", Nil).stepper ) + Okay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).stepper ) + Okay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).keyStepper ) + Okay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).valueStepper ) + good( ci.HashSet[String]("salmon").stepper ) + good( ci.IndexedSeq[String]("salmon").stepper ) + Okay( ci.IntMap[String](123456 -> "salmon").stepper ) + Okay( ci.IntMap[String](123456 -> "salmon").valueStepper ) + Okay( ci.Iterable[String]("salmon").stepper ) + Okay( ci.LinearSeq[String]("salmon").stepper ) + Okay( ci.List[String]("salmon").stepper ) + Okay( ci.ListMap[String, String]("fish" -> "salmon").stepper ) + Okay( ci.ListMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.ListMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.ListSet[String]("salmon").stepper ) + Okay( ci.LongMap[String](9876543210L -> "salmon").stepper ) + Okay( ci.LongMap[String](9876543210L -> "salmon").valueStepper ) + Okay( ci.Map[String, String]("fish" -> "salmon").stepper ) + Okay( ci.Map[String, String]("fish" -> "salmon").keyStepper ) + 
Okay( ci.Map[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.Queue[String]("salmon").stepper ) + Okay( ci.Seq[String]("salmon").stepper ) + Okay( ci.Set[String]("salmon").stepper ) + Okay( ci.SortedMap[String, String]("fish" -> "salmon").stepper ) + Okay( ci.SortedMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.SortedMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.SortedSet[String]("salmon").stepper ) + Okay( ci.Stream[String]("salmon").stepper ) + _eh_( ci.Stream[String]("salmon").view.stepper ) + Okay( ci.LazyList[String]("salmon").stepper ) + _eh_( ci.LazyList[String]("salmon").view.stepper ) + IFFY( ci.Iterable[String]("salmon").accumulate.stepper ) + Okay( ci.TreeMap[String, String]("fish" -> "salmon").stepper ) + Okay( ci.TreeMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.TreeMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.TreeSet[String]("salmon").stepper ) + good( ci.Vector[String]("salmon").stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[String]("salmon"): cm.AbstractBuffer[String]).stepper ) + Okay( (cm.PriorityQueue[String]("salmon"): cm.AbstractIterable[String]).stepper ) + Okay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).stepper ) + Okay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).keyStepper ) + Okay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).valueStepper ) + Okay( (cm.ArrayBuffer[String]("salmon"): cm.AbstractSeq[String]).stepper ) + Okay( (cm.HashSet[String]("salmon"): cm.AbstractSet[String]).stepper ) + Okay( cm.AnyRefMap[String,String]("fish" -> "salmon").stepper ) + Okay( cm.AnyRefMap[String,String]("fish" -> "salmon").keyStepper ) + Okay( cm.AnyRefMap[String,String]("fish" -> "salmon").valueStepper ) + good( cm.ArrayBuffer[String]("salmon").stepper ) + good( (Array("salmon"): cm.ArraySeq[String]).stepper ) + good( 
cm.ArraySeq[String]("salmon").stepper ) + _eh_( cm.ArrayStack[String]("salmon").stepper ) + Okay( (cm.ArrayBuffer[String]("salmon"): cm.Buffer[String]).stepper ) + good( cm.HashMap[String, String]("fish" -> "salmon").stepper ) + good( cm.HashMap[String, String]("fish" -> "salmon").keyStepper ) + good( cm.HashMap[String, String]("fish" -> "salmon").valueStepper ) + good( cm.HashSet[String]("salmon").stepper ) + good( cm.IndexedSeq[String]("salmon").stepper ) + good( cm.IndexedSeq[String]("salmon").view.stepper ) + Okay( cm.Iterable[String]("salmon").stepper ) + good( cm.LinkedHashMap[String, String]("fish" -> "salmon").stepper ) + good( cm.LinkedHashMap[String, String]("fish" -> "salmon").keyStepper ) + good( cm.LinkedHashMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.LinkedHashSet[String]("salmon").stepper ) + Okay( cm.ListBuffer[String]("salmon").stepper ) + Okay( cm.ListMap[String, String]("fish" -> "salmon").stepper ) + Okay( cm.ListMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.ListMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.LongMap[String](9876543210L -> "salmon").stepper ) + Okay( cm.LongMap[String](9876543210L -> "salmon").valueStepper ) + Okay( cm.Map[String, String]("fish" -> "salmon").stepper ) + Okay( cm.Map[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.Map[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.OpenHashMap[String, String]("fish" -> "salmon").stepper ) + Okay( cm.OpenHashMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.OpenHashMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.PriorityQueue[String]("salmon").stepper ) + Fine( cm.Queue[String]("salmon").stepper ) // Used to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq` + Okay( cm.Seq[String]("salmon").stepper ) + Okay( cm.Set[String]("salmon").stepper ) + Okay( cm.SortedSet[String]("salmon").stepper ) + Fine( cm.Stack[String]("salmon").stepper ) // Used to be `Good` in
2.12, in 2.13 `Stack` is no longer a `LinearSeq` + IFFY( cm.Iterable[String]("salmon").accumulate.stepper ) + Okay( cm.TreeSet[String]("salmon").stepper ) + Okay( cm.UnrolledBuffer[String]("salmon").stepper ) + Okay( cm.WeakHashMap[String, String]("fish" -> "salmon").stepper ) + Okay( cm.WeakHashMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.WeakHashMap[String, String]("fish" -> "salmon").valueStepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[String, String]("fish" -> "salmon").stepper ) + Okay( cc.TrieMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cc.TrieMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).stepper ) + Okay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).keyStepper ) + Okay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).valueStepper ) + } + + @Test + def comprehensivelyDouble(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[DoubleStepper]) + //Double-specific tests + + // Collection section + Okay( co.Iterator[Double](3.14159).buffered.stepper ) + good( co.IndexedSeq[Double](3.14159).stepper ) + Okay( co.Iterable[Double](3.14159).stepper ) + Okay( co.Iterable[Double](3.14159).view.stepper ) + Okay( co.Iterator[Double](3.14159).stepper ) + Okay( co.LinearSeq[Double](3.14159).stepper ) + Okay( co.Map[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( co.Map[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( co.Seq[Double](3.14159).stepper ) + Okay( co.Seq[Double](3.14159).view.stepper ) + Okay( co.Set[Double](3.14159).stepper ) + Okay( co.SortedMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( co.SortedMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( co.SortedSet[Double](3.14159).stepper ) + IFFY( co.Iterable[Double](3.14159).accumulate.stepper ) + IFFY( (co.Iterator[Double](3.14159):
co.IterableOnce[Double]).accumulate.stepper ) + IFFY( co.Iterable[Double](3.14159).view.accumulate.stepper ) + + // Immutable section + Okay( ci.::(3.14159, Nil).stepper ) + Okay( (ci.HashMap[Double, Double](2.718281828 -> 3.14159): ci.AbstractMap[Double, Double]).keyStepper ) + Okay( (ci.HashMap[Double, Double](2.718281828 -> 3.14159): ci.AbstractMap[Double, Double]).valueStepper ) + good( ci.HashSet[Double](3.14159).stepper ) + good( ci.IndexedSeq[Double](3.14159).stepper ) + Okay( ci.IntMap[Double](123456 -> 3.14159).valueStepper ) + Okay( ci.Iterable[Double](3.14159).stepper ) + Okay( ci.LinearSeq[Double](3.14159).stepper ) + Okay( ci.List[Double](3.14159).stepper ) + Okay( ci.ListMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.ListMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.ListSet[Double](3.14159).stepper ) + Okay( ci.LongMap[Double](9876543210L -> 3.14159).valueStepper ) + Okay( ci.Map[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.Map[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.Queue[Double](3.14159).stepper ) + Okay( ci.Seq[Double](3.14159).stepper ) + Okay( ci.Set[Double](3.14159).stepper ) + Okay( ci.SortedMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.SortedMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.SortedSet[Double](3.14159).stepper ) + Okay( ci.Stream[Double](3.14159).stepper ) + _eh_( ci.Stream[Double](3.14159).view.stepper ) + Okay( ci.LazyList[Double](3.14159).stepper ) + _eh_( ci.LazyList[Double](3.14159).view.stepper ) + IFFY( ci.Iterable[Double](3.14159).accumulate.stepper ) + Okay( ci.TreeMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.TreeMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.TreeSet[Double](3.14159).stepper ) + good( ci.Vector[Double](3.14159).stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[Double](3.14159): cm.AbstractBuffer[Double]).stepper ) + Okay( 
(cm.PriorityQueue[Double](3.14159): cm.AbstractIterable[Double]).stepper ) + Okay( (cm.HashMap[Double, Double](2.718281828 -> 3.14159): cm.AbstractMap[Double, Double]).keyStepper ) + Okay( (cm.HashMap[Double, Double](2.718281828 -> 3.14159): cm.AbstractMap[Double, Double]).valueStepper ) + Okay( (cm.ArrayBuffer[Double](3.14159): cm.AbstractSeq[Double]).stepper ) + Okay( (cm.HashSet[Double](3.14159): cm.AbstractSet[Double]).stepper ) + Okay( cm.AnyRefMap[String,Double]("fish" -> 3.14159).valueStepper ) + good( cm.ArrayBuffer[Double](3.14159).stepper ) + good( (Array(3.14159): cm.ArraySeq[Double]).stepper ) + good( cm.ArraySeq[Double](3.14159).stepper ) + _eh_( cm.ArrayStack[Double](3.14159).stepper ) + Okay( (cm.ArrayBuffer[Double](3.14159): cm.Buffer[Double]).stepper ) + good( cm.HashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + good( cm.HashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + good( cm.HashSet[Double](3.14159).stepper ) + good( cm.IndexedSeq[Double](3.14159).stepper ) + good( cm.IndexedSeq[Double](3.14159).view.stepper ) + Okay( cm.Iterable[Double](3.14159).stepper ) + good( cm.LinkedHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + good( cm.LinkedHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.LinkedHashSet[Double](3.14159).stepper ) + Okay( cm.ListBuffer[Double](3.14159).stepper ) + Okay( cm.ListMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.ListMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.LongMap[Double](9876543210L -> 3.14159).valueStepper ) + Okay( cm.Map[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.Map[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.OpenHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.OpenHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.PriorityQueue[Double](3.14159).stepper ) + Fine( cm.Queue[Double](3.14159).stepper ) // Used to be 
`Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq` + Okay( cm.Seq[Double](3.14159).stepper ) + Okay( cm.Set[Double](3.14159).stepper ) + Okay( cm.SortedSet[Double](3.14159).stepper ) + Fine( cm.Stack[Double](3.14159).stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq` + IFFY( cm.Iterable[Double](3.14159).accumulate.stepper ) + Okay( cm.TreeSet[Double](3.14159).stepper ) + Okay( cm.UnrolledBuffer[Double](3.14159).stepper ) + Okay( cm.WeakHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.WeakHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cc.TrieMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( (cc.TrieMap[Double, Double](2.718281828 -> 3.14159): cc.Map[Double, Double]).keyStepper ) + Okay( (cc.TrieMap[Double, Double](2.718281828 -> 3.14159): cc.Map[Double, Double]).valueStepper ) + } + + @Test + def comprehensivelyInt(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper") + + // Int-specific tests + good( co.BitSet(42).stepper ) + good( ci.BitSet(42).stepper ) + good( ci.NumericRange(123456, 123458, 1).stepper ) + good( cm.BitSet(42).stepper ) + good( (1 until 2).stepper ) + Okay( ci.IntMap[String](123456 -> "salmon").keyStepper ) + Okay( ci.IntMap[Double](123456 -> 3.14159).keyStepper ) + Okay( ci.IntMap[Long](123456 -> 0x123456789L).keyStepper ) + + // Collection section + Okay( co.Iterator[Int](654321).buffered.stepper ) + good( co.IndexedSeq[Int](654321).stepper ) + Okay( co.Iterable[Int](654321).stepper ) + Okay( co.Iterable[Int](654321).view.stepper ) + Okay( co.Iterator[Int](654321).stepper ) + Okay( co.LinearSeq[Int](654321).stepper ) + Okay( co.Map[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( co.Map[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( 
co.Seq[Int](654321).stepper ) + Okay( co.Seq[Int](654321).view.stepper ) + Okay( co.Set[Int](654321).stepper ) + Okay( co.SortedMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( co.SortedMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( co.SortedSet[Int](654321).stepper ) + IFFY( co.Iterable[Int](654321).accumulate.stepper ) + IFFY( (co.Iterator[Int](654321): co.IterableOnce[Int]).accumulate.stepper ) + IFFY( co.Iterable[Int](654321).view.accumulate.stepper ) + + // Immutable section + Okay( ci.::(654321, Nil).stepper ) + Okay( (ci.HashMap[Int, Int](0xDEEDED -> 654321): ci.AbstractMap[Int, Int]).keyStepper ) + Okay( (ci.HashMap[Int, Int](0xDEEDED -> 654321): ci.AbstractMap[Int, Int]).valueStepper ) + good( ci.HashSet[Int](654321).stepper ) + good( ci.IndexedSeq[Int](654321).stepper ) + Okay( ci.IntMap[Int](123456 -> 654321).keyStepper ) + Okay( ci.IntMap[Int](123456 -> 654321).valueStepper ) + Okay( ci.Iterable[Int](654321).stepper ) + Okay( ci.LinearSeq[Int](654321).stepper ) + Okay( ci.List[Int](654321).stepper ) + Okay( ci.ListMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.ListMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.ListSet[Int](654321).stepper ) + Okay( ci.LongMap[Int](9876543210L -> 654321).valueStepper ) + Okay( ci.Map[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.Map[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.Queue[Int](654321).stepper ) + Okay( ci.Seq[Int](654321).stepper ) + Okay( ci.Set[Int](654321).stepper ) + Okay( ci.SortedMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.SortedMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.SortedSet[Int](654321).stepper ) + Okay( ci.Stream[Int](654321).stepper ) + _eh_( ci.Stream[Int](654321).view.stepper ) + Okay( ci.LazyList[Int](654321).stepper ) + _eh_( ci.LazyList[Int](654321).view.stepper ) + IFFY( ci.Iterable[Int](654321).accumulate.stepper ) + Okay( ci.TreeMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.TreeMap[Int, 
Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.TreeSet[Int](654321).stepper ) + good( ci.Vector[Int](654321).stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[Int](654321): cm.AbstractBuffer[Int]).stepper ) + Okay( (cm.PriorityQueue[Int](654321): cm.AbstractIterable[Int]).stepper ) + Okay( (cm.HashMap[Int, Int](0xDEEDED -> 654321): cm.AbstractMap[Int, Int]).keyStepper ) + Okay( (cm.HashMap[Int, Int](0xDEEDED -> 654321): cm.AbstractMap[Int, Int]).valueStepper ) + Okay( (cm.ArrayBuffer[Int](654321): cm.AbstractSeq[Int]).stepper ) + Okay( (cm.HashSet[Int](654321): cm.AbstractSet[Int]).stepper ) + Okay( cm.AnyRefMap[String, Int]("fish" -> 654321).valueStepper ) + good( cm.ArrayBuffer[Int](654321).stepper ) + good( (Array(654321): cm.ArraySeq[Int]).stepper ) + good( cm.ArraySeq[Int](654321).stepper ) + _eh_( cm.ArrayStack[Int](654321).stepper ) + Okay( (cm.ArrayBuffer[Int](654321): cm.Buffer[Int]).stepper ) + good( cm.HashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + good( cm.HashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + good( cm.HashSet[Int](654321).stepper ) + good( cm.IndexedSeq[Int](654321).stepper ) + good( cm.IndexedSeq[Int](654321).view.stepper ) + Okay( cm.Iterable[Int](654321).stepper ) + good( cm.LinkedHashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + good( cm.LinkedHashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.LinkedHashSet[Int](654321).stepper ) + Okay( cm.ListBuffer[Int](654321).stepper ) + Okay( cm.ListMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.ListMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.LongMap[Int](9876543210L -> 654321).valueStepper ) + Okay( cm.Map[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.Map[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.OpenHashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.OpenHashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.PriorityQueue[Int](654321).stepper ) + Fine( cm.Queue[Int](654321).stepper ) // Used 
to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq` + Okay( cm.Seq[Int](654321).stepper ) + Okay( cm.Set[Int](654321).stepper ) + Okay( cm.SortedSet[Int](654321).stepper ) + Fine( cm.Stack[Int](654321).stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq` + IFFY( cm.Iterable[Int](654321).accumulate.stepper ) + Okay( cm.TreeSet[Int](654321).stepper ) + Okay( cm.UnrolledBuffer[Int](654321).stepper ) + Okay( cm.WeakHashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.WeakHashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cc.TrieMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( (cc.TrieMap[Int, Int](0xDEEDED -> 654321): cc.Map[Int, Int]).keyStepper ) + Okay( (cc.TrieMap[Int, Int](0xDEEDED -> 654321): cc.Map[Int, Int]).valueStepper ) + } + + @Test + def shortWidening(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper") + + good( Array[Short](654321.toShort).stepper ) + good( (Array[Short](654321.toShort): cm.ArraySeq[Short]).stepper ) + + //TODO: None of these currently work because there are no native Stepper implementations: + + //good( ci.NumericRange(123456.toShort, 123458.toShort, 1.toShort).stepper ) + //good( ((Array[Short](654321.toShort): cm.ArraySeq[Short]): cm.ArrayLike[Short, cm.ArraySeq[Short]]).stepper ) + //good( (Array[Short](654321.toShort): cm.ArrayOps[Short]).stepper ) + //good( cm.ResizableArray[Short](654321.toShort).stepper ) + } + + @Test + def comprehensivelyLong(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[LongStepper]) + + // Long-specific tests + good( ci.NumericRange(9876543210L, 9876543212L, 1L).stepper ) + Okay( ci.LongMap[String](9876543210L -> "salmon").keyStepper ) + Okay( cm.LongMap[String](9876543210L -> "salmon").keyStepper ) + Okay( ci.LongMap[Double](9876543210L -> 
3.14159).keyStepper ) + Okay( cm.LongMap[Double](9876543210L -> 3.14159).keyStepper ) + Okay( ci.LongMap[Int](9876543210L -> 654321).keyStepper ) + Okay( cm.LongMap[Int](9876543210L -> 654321).keyStepper ) + + // Collection section + Okay( co.Iterator[Long](0x123456789L).buffered.stepper ) + good( co.IndexedSeq[Long](0x123456789L).stepper ) + Okay( co.Iterable[Long](0x123456789L).stepper ) + Okay( co.Iterable[Long](0x123456789L).view.stepper ) + Okay( co.Iterator[Long](0x123456789L).stepper ) + Okay( co.LinearSeq[Long](0x123456789L).stepper ) + Okay( co.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( co.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( co.Seq[Long](0x123456789L).stepper ) + Okay( co.Seq[Long](0x123456789L).view.stepper ) + Okay( co.Set[Long](0x123456789L).stepper ) + Okay( co.SortedMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( co.SortedMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( co.SortedSet[Long](0x123456789L).stepper ) + IFFY( co.Iterable[Long](0x123456789L).accumulate.stepper ) + IFFY( (co.Iterator[Long](0x123456789L): co.IterableOnce[Long]).accumulate.stepper ) + IFFY( co.Iterable[Long](0x123456789L).view.accumulate.stepper ) + + // Immutable section + Okay( ci.::(0x123456789L, Nil).stepper ) + Okay( (ci.HashMap[Long, Long](1234567654321L -> 0x123456789L): ci.AbstractMap[Long, Long]).keyStepper ) + Okay( (ci.HashMap[Long, Long](1234567654321L -> 0x123456789L): ci.AbstractMap[Long, Long]).valueStepper ) + good( ci.HashSet[Long](0x123456789L).stepper ) + good( ci.IndexedSeq[Long](0x123456789L).stepper ) + Okay( ci.IntMap[Long](123456 -> 0x123456789L).valueStepper ) + Okay( ci.Iterable[Long](0x123456789L).stepper ) + Okay( ci.LinearSeq[Long](0x123456789L).stepper ) + Okay( ci.List[Long](0x123456789L).stepper ) + Okay( ci.ListMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.ListMap[Long, Long](1234567654321L -> 
0x123456789L).valueStepper ) + Okay( ci.ListSet[Long](0x123456789L).stepper ) + Okay( ci.LongMap[Long](9876543210L -> 0x123456789L).keyStepper ) + Okay( ci.LongMap[Long](9876543210L -> 0x123456789L).valueStepper ) + Okay( ci.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.Queue[Long](0x123456789L).stepper ) + Okay( ci.Seq[Long](0x123456789L).stepper ) + Okay( ci.Set[Long](0x123456789L).stepper ) + Okay( ci.SortedMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.SortedMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.SortedSet[Long](0x123456789L).stepper ) + Okay( ci.Stream[Long](0x123456789L).stepper ) + _eh_( ci.Stream[Long](0x123456789L).view.stepper ) + Okay( ci.LazyList[Long](0x123456789L).stepper ) + _eh_( ci.LazyList[Long](0x123456789L).view.stepper ) + IFFY( ci.Iterable[Long](0x123456789L).accumulate.stepper ) + Okay( ci.TreeMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.TreeMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.TreeSet[Long](0x123456789L).stepper ) + good( ci.Vector[Long](0x123456789L).stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[Long](0x123456789L): cm.AbstractBuffer[Long]).stepper ) + Okay( (cm.PriorityQueue[Long](0x123456789L): cm.AbstractIterable[Long]).stepper ) + Okay( (cm.HashMap[Long, Long](1234567654321L -> 0x123456789L): cm.AbstractMap[Long, Long]).keyStepper ) + Okay( (cm.HashMap[Long, Long](1234567654321L -> 0x123456789L): cm.AbstractMap[Long, Long]).valueStepper ) + Okay( (cm.ArrayBuffer[Long](0x123456789L): cm.AbstractSeq[Long]).stepper ) + Okay( (cm.HashSet[Long](0x123456789L): cm.AbstractSet[Long]).stepper ) + Okay( cm.AnyRefMap[String,Long]("fish" -> 0x123456789L).valueStepper ) + good( cm.ArrayBuffer[Long](0x123456789L).stepper ) + good( (Array(0x123456789L): cm.ArraySeq[Long]).stepper ) + good( 
cm.ArraySeq[Long](0x123456789L).stepper ) + _eh_( cm.ArrayStack[Long](0x123456789L).stepper ) + Okay( (cm.ArrayBuffer[Long](0x123456789L): cm.Buffer[Long]).stepper ) + good( cm.HashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + good( cm.HashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + good( cm.HashSet[Long](0x123456789L).stepper ) + good( cm.IndexedSeq[Long](0x123456789L).stepper ) + good( cm.IndexedSeq[Long](0x123456789L).view.stepper ) + Okay( cm.Iterable[Long](0x123456789L).stepper ) + good( cm.LinkedHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + good( cm.LinkedHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.LinkedHashSet[Long](0x123456789L).stepper ) + Okay( cm.ListBuffer[Long](0x123456789L).stepper ) + Okay( cm.ListMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.ListMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.LongMap[Long](9876543210L -> 0x123456789L).keyStepper ) + Okay( cm.LongMap[Long](9876543210L -> 0x123456789L).valueStepper ) + Okay( cm.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.OpenHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.OpenHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.PriorityQueue[Long](0x123456789L).stepper ) + Fine( cm.Queue[Long](0x123456789L).stepper ) // Used to be `Good` in 2.12, in 2.13 `Queue` is no longer a `LinearSeq` + Okay( cm.Seq[Long](0x123456789L).stepper ) + Okay( cm.Set[Long](0x123456789L).stepper ) + Okay( cm.SortedSet[Long](0x123456789L).stepper ) + Fine( cm.Stack[Long](0x123456789L).stepper ) // Used to be `Good` in 2.12, in 2.13 `Stack` is no longer a `LinearSeq` + IFFY( cm.Iterable[Long](0x123456789L).accumulate.stepper ) + Okay( cm.TreeSet[Long](0x123456789L).stepper ) + Okay( cm.UnrolledBuffer[Long](0x123456789L).stepper 
) + Okay( cm.WeakHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.WeakHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( (cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L): cc.Map[Long, Long]).keyStepper ) + Okay( (cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L): cc.Map[Long, Long]).valueStepper ) + } + + @Test + def comprehensivelySpecific(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper") + + good( ci.NumericRange(277: Short, 279: Short, 1: Short).stepper ) + good( ("salmon": ci.WrappedString).stepper ) + } +} diff --git a/src/test/scala-2.13+/scala/compat/java8/StepperTest.scala b/src/test/scala-2.13+/scala/compat/java8/StepperTest.scala new file mode 100644 index 0000000..e8e51ec --- /dev/null +++ b/src/test/scala-2.13+/scala/compat/java8/StepperTest.scala @@ -0,0 +1,319 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import java.util + +import org.junit.Test +import org.junit.Assert._ +import java.util.Spliterator + +import collectionImpl._ +import StreamConverters._ +import scala.collection.{AnyStepper, IntStepper} + + +class IncStepperA(private val size0: Long) extends IntStepper { + if (size0 < 0) throw new IllegalArgumentException("Size must be >= 0L") + private var i = 0L + def characteristics = Spliterator.SIZED | Spliterator.SUBSIZED | Spliterator.ORDERED + override def estimateSize: Long = math.max(0L, size0 - i) + def hasStep = i < size0 + def nextStep() = { i += 1; (i - 1).toInt } + def trySplit() = if (estimateSize <= 1) null else { + val sub = new IncStepperA(size0 - (size0 - i)/2) + sub.i = i + i = sub.size0 + sub + } +} + +class IncSpliterator(private val size0: Long) extends Spliterator.OfInt { + if (size0 < 0) throw new IllegalArgumentException("Size must be >= 0L") + private var i = 0L + def characteristics = Spliterator.SIZED | Spliterator.SUBSIZED | Spliterator.ORDERED + def estimateSize() = math.max(0L, size0 - i) + def tryAdvance(f: java.util.function.IntConsumer): Boolean = if (i >= size0) false else { f.accept(i.toInt); i += 1; true } + def trySplit(): Spliterator.OfInt = if (i+1 >= size0) null else { + val sub = new IncSpliterator(size0 - (size0 - i)/2) + sub.i = i + i = sub.size0 + sub + } + override def forEachRemaining(f: java.util.function.IntConsumer): Unit = { while (i < size0) { f.accept(i.toInt); i += 1 } } +} + +class MappingStepper[@specialized (Double, Int, Long) A, @specialized(Double, Int, Long) B](underlying: Stepper[A], mapping: A => B) extends Stepper[B] { + def characteristics = underlying.characteristics + def hasStep = underlying.hasStep + def nextStep() = mapping(underlying.nextStep()) + + override def trySplit(): Stepper[B] = { + val r = underlying.trySplit() + if (r == null) null else new MappingStepper[A, B](r, mapping) + } + + override def estimateSize: Long = underlying.estimateSize + + 
override def javaIterator[C >: B]: util.Iterator[_] = new util.Iterator[B] { + override def hasNext: Boolean = underlying.hasStep + override def next(): B = mapping(underlying.nextStep()) + } + def substep() = { + val undersub = underlying.substep() + if (undersub == null) null + else new MappingStepper(undersub, mapping) + } + def spliterator[C >: B]: Spliterator[_] = new MappingSpliterator[A, B](underlying.spliterator.asInstanceOf[Spliterator[A]], mapping) +} + +class MappingSpliterator[A, B](private val underlying: Spliterator[A], mapping: A => B) extends Spliterator[B] { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.Consumer[_ >: B]): Boolean = underlying.tryAdvance(new java.util.function.Consumer[A]{ def accept(a: A): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator[B] = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new MappingSpliterator(undersplit, mapping) + } +} +class IntToGenericSpliterator[A](private val underlying: Spliterator.OfInt, mapping: Int => A) extends Spliterator[A] { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.Consumer[_ >: A]): Boolean = underlying.tryAdvance(new java.util.function.IntConsumer{ def accept(a: Int): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator[A] = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new IntToGenericSpliterator[A](undersplit, mapping) + } +} +class IntToDoubleSpliterator(private val underlying: Spliterator.OfInt, mapping: Int => Double) extends Spliterator.OfDouble { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.DoubleConsumer): Boolean = underlying.tryAdvance(new java.util.function.IntConsumer{ def accept(a: Int): Unit = { 
f.accept(mapping(a)) } }) + def trySplit(): Spliterator.OfDouble = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new IntToDoubleSpliterator(undersplit, mapping) + } +} +class IntToLongSpliterator(private val underlying: Spliterator.OfInt, mapping: Int => Long) extends Spliterator.OfLong { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.LongConsumer): Boolean = underlying.tryAdvance(new java.util.function.IntConsumer{ def accept(a: Int): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator.OfLong = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new IntToLongSpliterator(undersplit, mapping) + } +} + +class SpliteratorStepper[A](sp: Spliterator[A]) extends AnyStepper[A] { + override def trySplit(): AnyStepper[A] = { + val r = sp.trySplit() + if (r == null) null else new SpliteratorStepper(r) + } + + var cache: AnyRef = null + + override def hasStep: Boolean = cache != null || sp.tryAdvance(x => cache = x.asInstanceOf[AnyRef]) + + override def nextStep(): A = if (hasStep) { + val r = cache + cache = null + r.asInstanceOf[A] + } else throw new NoSuchElementException("") + + override def estimateSize: Long = sp.estimateSize() + + override def characteristics: Int = sp.characteristics() +} + +class StepperTest { + def subs[Z, A, CC <: Stepper[A]](zero: Z)(s: Stepper[A])(f: Stepper[A] => Z, op: (Z, Z) => Z): Z = { + val ss = s.substep() + if (ss == null) op(zero, f(s)) + else { + val left = subs(zero)(ss)(f, op) + subs(left)(s)(f, op) + } + } + + val sizes = Vector(0, 1, 2, 4, 15, 17, 2512) + def sources: Vector[(Int, Stepper[Int])] = sizes.flatMap{ i => + Vector( + i -> new IncStepperA(i), + i -> new SpliteratorStepper(new IncSpliterator(i).asInstanceOf[Spliterator[Int]]), + i -> new MappingStepper[Int,Int](new IncStepperA(i), x => x), + i -> new MappingStepper[Long, Int](new SpliteratorStepper(new 
IntToLongSpliterator(new IncSpliterator(i), _.toLong).asInstanceOf[Spliterator[Long]]), _.toInt), + i -> new MappingStepper[Double, Int](new SpliteratorStepper(new IntToDoubleSpliterator(new IncSpliterator(i), _.toDouble).asInstanceOf[Spliterator[Double]]), _.toInt), + i -> new MappingStepper[String, Int](new SpliteratorStepper(new IntToGenericSpliterator[String](new IncSpliterator(i), _.toString)), _.toInt) + ) + } + + @Test + def stepping(): Unit = { + sources.foreach{ case (i, s) => assert((0 until i).forall{ j => s.hasStep && s.nextStep() == j } && !s.hasStep) } + sources.foreach{ case (i, s) => + val set = collection.mutable.BitSet.empty + subs(0)(s)( + { x => + while (x.hasStep) { val y = x.nextStep(); assert(!(set contains y)); set += y } + 0 + }, + _ + _ + ) + assert((0 until i).toSet == set) + } + } + + @Test + def trying(): Unit = { + sources.foreach{ case (i,s) => + val set = collection.mutable.BitSet.empty + while (s.hasStep) { val y = s.nextStep(); assert(!(set contains y)); set += y } + assert((0 until i).toSet == set) + } + sources.foreach{ case (i,s) => + val set = collection.mutable.BitSet.empty + subs(0)(s)( + { x => + while (x.hasStep) { val y = x.nextStep(); assert(!(set contains y)); set += y } + 0 + }, + _ + _ + ) + assertTrue(s.getClass.getName + s" said [0, $i) was " + set.mkString("{", " ", "}"), (0 until i).toSet == set) + } + } + + @Test + def substepping(): Unit = { + sources.foreach{ case (i,s) => + val ss = s.substep() + assertEquals(ss == null, i < 2) + if (ss != null) { + assertTrue(s.hasStep) + assertTrue(ss.hasStep) + val c1 = s.count() + val c2 = ss.count() + assertEquals(s"$i != $c1 + $c2 from ${s.getClass.getName}", i, c1 + c2) + } + else assertEquals(i, s.count()) + } + } + + @Test + def characteristically(): Unit = { + val expected = Spliterator.SIZED | Spliterator.SUBSIZED | Spliterator.ORDERED + sources.foreach{ case (_,s) => assertEquals(s.characteristics, expected)} + sources.foreach{ case (_,s) => subs(0)(s)(x => { 
assertEquals(x.characteristics, expected); 0 }, _ + _) } + } + + @Test + def count_only(): Unit = { + sources.foreach{ case (i, s) => assertEquals(i, s.count()) } + sources.foreach{ case (i, s) => assertEquals(i, subs(0)(s)(_.count().toInt, _ + _)) } + } + + @Test + def count_conditionally(): Unit = { + sources.foreach{ case (i, s) => assertEquals((0 until i).count(_ % 3 == 0), s.count(_ % 3 == 0)) } + sources.foreach{ case (i, s) => assertEquals((0 until i).count(_ % 3 == 0), subs(0)(s)(_.count(_ % 3 == 0).toInt, _ + _)) } + } + + @Test + def existence(): Unit = { + sources.foreach{ case (i, s) => assert(i > 0 == s.exists(_ >= 0)) } + sources.foreach{ case (i, s) => assert(i > 16 == s.exists(_ % 17 == 16)) } + sources.foreach{ case (i, s) => assert(i > 0 == subs(false)(s)(_.exists(_ >= 0), _ || _)) } + sources.foreach{ case (i, s) => assert(i > 16 == subs(false)(s)(_.exists(_ % 17 == 16), _ || _)) } + } + + @Test + def finding(): Unit = { + for (k <- 0 until 100) { + (sources zip sources).foreach{ case ((i,s), (j,t)) => + val x = scala.util.Random.nextInt(math.min(i,j)+3) + val a = s.find(_ == x) + val b = subs(None: Option[Int])(t)(_.find(_ == x), _ orElse _) + assertEquals(a, b) + assertEquals(a.isDefined, x < math.min(i,j)) + } + } + } + + @Test + def folding(): Unit = { + sources.foreach{ case (i,s) => assertEquals((0 until i).mkString, s.fold("")(_ + _.toString)) } + sources.foreach{ case (i,s) => assertEquals((0 until i).mkString, subs("")(s)(_.fold("")(_ + _.toString), _ + _)) } + sources.foreach{ case (i,s) => assertEquals((0 until i).map(_.toDouble).sum, s.fold(0.0)(_ + _), 1e-10) } + sources.foreach{ case (i,s) => assertEquals((0 until i).map(_.toDouble).sum, subs(0.0)(s)(_.fold(0.0)(_ + _), _ + _), 1e-10) } + } + + @Test + def foldingUntil(): Unit = { + def expected(i: Int) = (0 until i).scan(0)(_ + _).dropWhile(_ < 6*i).headOption.getOrElse((0 until i).sum) + sources.foreach{ case (i,s) => assertEquals(expected(i), s.foldTo(0)(_ + _)(_ >= 6*i)) } + 
sources.foreach{ case (_,s) => assertEquals(-1, s.foldTo(-1)(_ * _)(_ => true)) } + sources.foreach{ case (i,s) => + val ss = s.substep() + val x = s.foldTo( if (ss == null) 0 else ss.foldTo(0)(_ + _)(_ >= 6*i) )(_ + _)(_ >= 6*i) + assertEquals(expected(i), x) + } + } + + @Test + def foreaching(): Unit = { + sources.foreach{ case (i,s) => + val clq = new java.util.concurrent.ConcurrentLinkedQueue[String] + s.foreach( clq add _.toString ) + assertEquals((0 until i).map(_.toString).toSet, Iterator.continually(if (!clq.isEmpty) Some(clq.poll) else None).takeWhile(_.isDefined).toSet.flatten) + } + sources.foreach{ case (i,s) => + val clq = new java.util.concurrent.ConcurrentLinkedQueue[String] + subs(())(s)(_.foreach( clq add _.toString ), (_, _) => ()) + assertEquals((0 until i).map(_.toString).toSet, Iterator.continually(if (!clq.isEmpty) Some(clq.poll) else None).takeWhile(_.isDefined).toSet.flatten) + } + } + + @Test + def reducing(): Unit = { + sources.foreach{ case (i,s) => + if (i==0) assertEquals(s.hasStep, false) + else assertEquals((0 until i).sum, s.reduce(_ + _)) + } + sources.foreach{ case (i,s) => + assertEquals((0 until i).sum, subs(0)(s)(x => if (!x.hasStep) 0 else x.reduce(_ + _), _ + _)) + } + } + + @Test + def iterating(): Unit = { + sources.foreach{ case (i, s) => assert(Iterator.range(0,i) sameElements s.iterator) } + } + + @Test + def spliterating(): Unit = { + sources.foreach{ case (i,s) => + var sum = 0 + s.spliterator.asInstanceOf[Spliterator[Int]].forEachRemaining(new java.util.function.Consumer[Int]{ def accept(i: Int): Unit = { sum += i } }) + assertEquals(sum, (0 until i).sum) + } + sources.foreach{ case (i,s) => + val sum = subs(0)(s)(x => { var sm = 0; x.spliterator.asInstanceOf[Spliterator[Int]].forEachRemaining(new java.util.function.Consumer[Int]{ def accept(i: Int): Unit = { sm += i } }); sm }, _ + _) + assertEquals(sum, (0 until i).sum) + } + } +} + diff --git a/src/test/scala-2.13+/scala/compat/java8/StreamConvertersTest.scala 
b/src/test/scala-2.13+/scala/compat/java8/StreamConvertersTest.scala new file mode 100644 index 0000000..0beb9ae --- /dev/null +++ b/src/test/scala-2.13+/scala/compat/java8/StreamConvertersTest.scala @@ -0,0 +1,303 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +import scala.language.higherKinds + +import org.junit.Test +import org.junit.Assert._ + +import java.util.stream._ +import StreamConverters._ + +class StreamConvertersTest { + + def assertEq[A](a1: A, a2: A, s: String): Unit = { assertEquals(s, a1, a2) } // Weird order normally! + def assertEq[A](a1: A, a2: A): Unit = { assertEq(a1, a2, "not equal") } + def assert(b: Boolean): Unit = { assertTrue(b) } + def assert(b: Boolean, s: String): Unit = { assertTrue(s, b) } + + def arrayO(n: Int) = (1 to n).map(_.toString).toArray + def arrayD(n: Int) = (1 to n).map(_.toDouble).toArray + def arrayI(n: Int) = (1 to n).toArray + def arrayL(n: Int) = (1 to n).map(_.toLong).toArray + + def newStream(n: Int) = java.util.Arrays.stream(arrayO(n)) + def newDoubleStream(n: Int) = java.util.Arrays.stream(arrayD(n)) + def newIntStream(n: Int) = java.util.Arrays.stream(arrayI(n)) + def newLongStream(n: Int) = java.util.Arrays.stream(arrayL(n)) + + val ns = Vector(0, 1, 2, 12, 15, 16, 17, 31, 32, 33, 151, 1298, 7159) + + @Test + def streamAccumulate(): Unit = { + for (n <- ns) { + val vecO = arrayO(n).toVector + val accO = newStream(n).parallel.accumulate + assertEq(vecO, newStream(n).accumulate.to(Vector), s"stream $n to vector") + assertEq(vecO, accO.to(Vector), s"stream $n to vector in parallel") + assertEq(vecO, accO.toArray.toVector, s"stream $n to vector via array in parallel") + assertEq(vecO, accO.iterator.toVector, s"stream $n to vector 
via iterator in parallel") + assertEq(vecO, accO.toList.toVector, s"stream $n to vector via list in parallel") + assert((0 until accO.size.toInt).forall(i => vecO(i) == accO(i)), s"stream $n indexed via accumulator") + assert(accO.isInstanceOf[scala.compat.java8.collectionImpl.Accumulator[_]], s"stream $n to generic accumulator") + + for (boxless <- Seq(false, true)) { + val sbox = (if (boxless) "" else "(boxed)") + val vecD = arrayD(n).toVector + val accD = + if (boxless) newDoubleStream(n).parallel.accumulate + else newDoubleStream(n).boxed.parallel.accumulatePrimitive + assertEq(vecD, newDoubleStream(n).accumulate.to(Vector), s"double stream $n to vector $sbox") + assertEq(vecD, accD.to(Vector), s"double stream $n to vector in parallel $sbox") + assertEq(vecD, accD.toArray.toVector, s"double stream $n to vector via array in parallel $sbox") + assertEq(vecD, accD.iterator.toVector, s"double stream $n to vector via iterator in parallel $sbox") + assertEq(vecD, accD.toList.toVector, s"double stream $n to vector via list in parallel $sbox") + assert((0 until accD.size.toInt).forall(i => vecD(i) == accD(i)), s"double stream $n indexed via accumulator $sbox") + assert(accD.isInstanceOf[scala.compat.java8.collectionImpl.DoubleAccumulator], s"double stream $n to generic accumulator $sbox") + + val vecI = arrayI(n).toVector + val accI = + if (boxless) newIntStream(n).parallel.accumulate + else newIntStream(n).boxed.parallel.accumulatePrimitive + assertEq(vecI, newIntStream(n).accumulate.to(Vector), s"int stream $n to vector $sbox") + assertEq(vecI, accI.to(Vector), s"int stream $n to vector in parallel $sbox") + assertEq(vecI, accI.toArray.toVector, s"int stream $n to vector via array in parallel $sbox") + assertEq(vecI, accI.iterator.toVector, s"int stream $n to vector via iterator in parallel $sbox") + assertEq(vecI, accI.toList.toVector, s"int stream $n to vector via list in parallel $sbox") + assert((0 until accI.size.toInt).forall(i => vecI(i) == accI(i)), s"int 
stream $n indexed via accumulator $sbox") + assert(accI.isInstanceOf[scala.compat.java8.collectionImpl.IntAccumulator], s"int stream $n to generic accumulator $sbox") + + val vecL = arrayL(n).toVector + val accL = + if (boxless) newLongStream(n).parallel.accumulate + else newLongStream(n).boxed.parallel.accumulatePrimitive + assertEq(vecL, newLongStream(n).accumulate.to(Vector), s"long stream $n to vector $sbox") + assertEq(vecL, accL.to(Vector), s"long stream $n to vector in parallel $sbox") + assertEq(vecL, accL.toArray.toVector, s"long stream $n to vector via array in parallel $sbox") + assertEq(vecL, accL.iterator.toVector, s"long stream $n to vector via iterator in parallel $sbox") + assertEq(vecL, accL.toList.toVector, s"long stream $n to vector via list in parallel $sbox") + assert((0 until accL.size.toInt).forall(i => vecL(i) == accL(i)), s"long stream $n indexed via accumulator $sbox") + assert(accL.isInstanceOf[scala.compat.java8.collectionImpl.LongAccumulator], s"long stream $n to generic accumulator $sbox") + } + } + } + + @Test + def streamToScala(): Unit = { + for (n <- ns) { + val vecO = arrayO(n).toVector + assertEq(vecO, newStream(n).toScala(Vector)) + assertEq(vecO, newStream(n).parallel.toScala(Vector)) + assertEq(vecO, newStream(n).toScala[Vector]) + assertEq(vecO, newStream(n).parallel.toScala[Vector]) + + val vecD = arrayD(n).toVector + assertEq(vecD, newDoubleStream(n).toScala(Vector)) + assertEq(vecD, newDoubleStream(n).parallel.toScala(Vector)) + assertEq(vecD, newDoubleStream(n).toScala[Vector]) + assertEq(vecD, newDoubleStream(n).parallel.toScala[Vector]) + + val vecI = arrayI(n).toVector + assertEq(vecI, newIntStream(n).toScala(Vector)) + assertEq(vecI, newIntStream(n).parallel.toScala(Vector)) + assertEq(vecI, newIntStream(n).toScala[Vector]) + assertEq(vecI, newIntStream(n).parallel.toScala[Vector]) + + val vecL = arrayL(n).toVector + assertEq(vecL, newLongStream(n).toScala(Vector)) + assertEq(vecL, 
newLongStream(n).parallel.toScala(Vector)) + assertEq(vecL, newLongStream(n).toScala[Vector]) + assertEq(vecL, newLongStream(n).parallel.toScala[Vector]) + } + } + + @Test + def streamUnbox(): Unit = { + assert(newDoubleStream(1).boxed.unboxed.isInstanceOf[DoubleStream]) + assert(newIntStream(1).boxed.unboxed.isInstanceOf[IntStream]) + assert(newLongStream(1).boxed.unboxed.isInstanceOf[LongStream]) + } + + import collection.mutable.{ ArrayBuffer, ArraySeq } + def abufO(n: Int) = { val ab = new ArrayBuffer[String]; arrayO(n).foreach(ab += _); ab } + def abufD(n: Int) = { val ab = new ArrayBuffer[Double]; arrayD(n).foreach(ab += _); ab } + def abufI(n: Int) = { val ab = new ArrayBuffer[Int]; arrayI(n).foreach(ab += _); ab } + def abufL(n: Int) = { val ab = new ArrayBuffer[Long]; arrayL(n).foreach(ab += _); ab } + def wrapO(n: Int): ArraySeq[String] = arrayO(n) + def wrapD(n: Int): ArraySeq[Double] = arrayD(n) + def wrapI(n: Int): ArraySeq[Int] = arrayI(n) + def wrapL(n: Int): ArraySeq[Long] = arrayL(n) + def vectO(n: Int) = arrayO(n).toVector + def vectD(n: Int) = arrayD(n).toVector + def vectI(n: Int) = arrayI(n).toVector + def vectL(n: Int) = arrayL(n).toVector + def genhset[A](aa: Array[A]) = { val hs = new collection.mutable.HashSet[A]; aa.foreach(hs += _); hs } + def hsetO(n: Int) = genhset(arrayO(n)) + def hsetD(n: Int) = genhset(arrayD(n)) + def hsetI(n: Int) = genhset(arrayI(n)) + def hsetL(n: Int) = genhset(arrayL(n)) + + @Test + def scalaToStream(): Unit = { + for (n <- ns) { + val arrO = arrayO(n) + val seqO = arrO.toSeq + val abO = abufO(n) + val wrO = wrapO(n) + val vecO = vectO(n) + val hsO = hsetO(n) + // Seems like a lot of boilerplate, but we need it to test implicit resolution + assertEq(seqO, seqO.seqStream.toScala[Seq]) +// assertEq(seqO, seqO.stepper.parStream.toScala[Seq]) // Must go through stepper if we're unsure whether we can parallelize well + assertEq(seqO, arrO.seqStream.toScala[Seq]) + assertEq(seqO, arrO.parStream.toScala[Seq]) + 
assertEq(seqO, abO.seqStream.toScala[Seq]) + assertEq(seqO, abO.parStream.toScala[Seq]) + assertEq(seqO, wrO.seqStream.toScala[Seq]) + assertEq(seqO, wrO.parStream.toScala[Seq]) + assertEq(seqO, vecO.seqStream.toScala[Seq]) + assertEq(seqO, vecO.parStream.toScala[Seq]) +// assertEq(seqO, hsO.seqStream.toScala[Seq].sortBy(_.toInt)) +// assertEq(seqO, hsO.parStream.toScala[Seq].sortBy(_.toInt)) + + val arrD = arrayD(n) + val seqD = arrD.toSeq + val abD = abufD(n) + val wrD = wrapD(n) + val vecD = vectD(n) + val hsD = hsetD(n) + assertEq(seqD, seqD.seqStream.toScala[Seq]) +// assertEq(seqD, seqD.stepper.parStream.toScala[Seq]) + assertEq(seqD, arrD.seqStream.toScala[Seq]) + assertEq(seqD, arrD.parStream.toScala[Seq]) + assert(arrD.seqStream.isInstanceOf[DoubleStream]) + assert(arrD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, abD.seqStream.toScala[Seq]) + assertEq(seqD, abD.parStream.toScala[Seq]) + assert(abD.seqStream.isInstanceOf[DoubleStream]) + assert(abD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, wrD.seqStream.toScala[Seq]) + assertEq(seqD, wrD.parStream.toScala[Seq]) + assert(wrD.seqStream.isInstanceOf[DoubleStream]) + assert(wrD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, vecD.seqStream.toScala[Seq]) + assertEq(seqD, vecD.parStream.toScala[Seq]) + assert(vecD.seqStream.isInstanceOf[DoubleStream]) + assert(vecD.parStream.isInstanceOf[DoubleStream]) +// assertEq(seqD, hsD.seqStream.toScala[Seq].sorted) +// assertEq(seqD, hsD.parStream.toScala[Seq].sorted) +// assert(hsD.seqStream.isInstanceOf[DoubleStream]) +// assert(hsD.parStream.isInstanceOf[DoubleStream]) + + val arrI = arrayI(n) + val seqI = arrI.toSeq + val abI = abufI(n) + val wrI = wrapI(n) + val vecI = vectI(n) + val hsI = hsetI(n) + assertEq(seqI, seqI.seqStream.toScala[Seq]) +// assertEq(seqI, seqI.stepper.parStream.toScala[Seq]) + assertEq(seqI, arrI.seqStream.toScala[Seq]) + assertEq(seqI, arrI.parStream.toScala[Seq]) + assert(arrI.seqStream.isInstanceOf[IntStream]) 
+ assert(arrI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, abI.seqStream.toScala[Seq]) + assertEq(seqI, abI.parStream.toScala[Seq]) + assert(abI.seqStream.isInstanceOf[IntStream]) + assert(abI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, wrI.seqStream.toScala[Seq]) + assertEq(seqI, wrI.parStream.toScala[Seq]) + assert(wrI.seqStream.isInstanceOf[IntStream]) + assert(wrI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, vecI.seqStream.toScala[Seq]) + assertEq(seqI, vecI.parStream.toScala[Seq]) + assert(vecI.seqStream.isInstanceOf[IntStream]) + assert(vecI.parStream.isInstanceOf[IntStream]) +// assertEq(seqI, hsI.seqStream.toScala[Seq].sorted) +// assertEq(seqI, hsI.parStream.toScala[Seq].sorted) +// assert(hsI.seqStream.isInstanceOf[IntStream]) +// assert(hsI.parStream.isInstanceOf[IntStream]) + + val arrL = arrayL(n) + val seqL = arrL.toSeq + val abL = abufL(n) + val wrL = wrapL(n) + val vecL = vectL(n) + val hsL = hsetL(n) + assertEq(seqL, seqL.seqStream.toScala[Seq]) +// assertEq(seqL, seqL.stepper.parStream.toScala[Seq]) + assertEq(seqL, arrL.seqStream.toScala[Seq]) + assertEq(seqL, arrL.parStream.toScala[Seq]) + assert(arrL.seqStream.isInstanceOf[LongStream]) + assert(arrL.parStream.isInstanceOf[LongStream]) + assertEq(seqL, abL.seqStream.toScala[Seq]) + assertEq(seqL, abL.parStream.toScala[Seq]) + assert(abL.seqStream.isInstanceOf[LongStream]) + assert(abL.parStream.isInstanceOf[LongStream]) + assertEq(seqD, wrD.seqStream.toScala[Seq]) + assertEq(seqD, wrD.parStream.toScala[Seq]) + assert(wrL.seqStream.isInstanceOf[LongStream]) + assert(wrL.parStream.isInstanceOf[LongStream]) + assertEq(seqD, wrD.seqStream.toScala[Seq]) + assertEq(seqD, wrD.parStream.toScala[Seq]) + assert(vecL.seqStream.isInstanceOf[LongStream]) + assert(vecL.parStream.isInstanceOf[LongStream]) +// assertEq(seqL, hsL.seqStream.toScala[Seq].sorted) +// assertEq(seqL, hsL.parStream.toScala[Seq].sorted) +// assert(hsL.seqStream.isInstanceOf[LongStream]) +// 
assert(hsL.parStream.isInstanceOf[LongStream]) + } + } + + @Test + def primitiveStreamTypes(): Unit = { + // Unboxed native + widening Steppers available: + assertEquals(Vector[Int](1, 2, 3), (Array[Int](1, 2, 3).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[Short](1.toShort, 2.toShort, 3.toShort), (Array[Short](1.toShort, 2.toShort, 3.toShort).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[String]("a", "b"), (Array[String]("a", "b").seqStream: Stream[String]).toScala[Vector]) + + // Boxed collections, widening via boxed AnySteppers: + assertEquals(Vector[Int](1, 2, 3), (Vector[Int](1, 2, 3).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[Short](1.toShort, 2.toShort, 3.toShort), (Vector[Short](1.toShort, 2.toShort, 3.toShort).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[String]("a", "b"), (Vector[String]("a", "b").seqStream: Stream[String]).toScala[Vector]) + } + + @Test + def issue_87(): Unit = { + // Vectors that are generated from other vectors tend _not_ to + // have all their display vectors consistent; the cached vectors + // are correct, but the higher-level vector does _not_ contain + // the cached vector in the correct place (for efficiency)! This + // is called being "dirty", and needs to be handled specially. 
+ val dirtyDisplayVector = Vector.fill(120)("a").slice(0, 40) + val shouldNotNPE = + dirtyDisplayVector.seqStream.collect(Collectors.toList()) + assertEq(shouldNotNPE.toArray(new Array[String](0)).toVector, dirtyDisplayVector, "Vector[Any].seqStream (with dirty display)") + + val dirtyDisplayVectorInt = Vector.fill(120)(999).slice(0, 40) + val shouldNotNPEInt = + dirtyDisplayVectorInt.seqStream.sum() + assertEq(shouldNotNPEInt, dirtyDisplayVectorInt.sum, "Vector[Int].seqStream (with dirty display)") + + val dirtyDisplayVectorLong = Vector.fill(120)(99999999999L).slice(0, 40) + val shouldNotNPELong = + dirtyDisplayVectorLong.seqStream.sum() + assertEq(shouldNotNPELong, dirtyDisplayVectorLong.sum, "Vector[Long].seqStream (with dirty display)") + + val dirtyDisplayVectorDouble = Vector.fill(120)(0.1).slice(0, 40) + val shouldNotNPEDouble = + math.rint(dirtyDisplayVectorDouble.seqStream.sum() * 10) + assertEq(shouldNotNPEDouble, math.rint(dirtyDisplayVectorDouble.sum * 10), "Vector[Double].seqStream (with dirty display)") + } +} diff --git a/src/test/scala-2.13-/scala/compat/java8/StepConvertersTest.scala b/src/test/scala-2.13-/scala/compat/java8/StepConvertersTest.scala new file mode 100644 index 0000000..d3e118c --- /dev/null +++ b/src/test/scala-2.13-/scala/compat/java8/StepConvertersTest.scala @@ -0,0 +1,606 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +import org.junit.Test +import org.junit.Assert._ + +class StepConvertersTest { + import java.util._ + import collectionImpl._ + import converterImpl._ + import StreamConverters._ // Includes StepConverters! 
+ import scala.{ collection => co } + import collection.{ mutable => cm, immutable => ci, concurrent => cc } + + def isAcc[X](x: X): Boolean = x match { + case _: AccumulatorStepper[_] => true + case _: DoubleAccumulatorStepper => true + case _: IntAccumulatorStepper => true + case _: LongAccumulatorStepper => true + case _ => false + } + + def isLin[X](x: X): Boolean = x match { + case _: AbstractStepsLikeIterator[_, _, _] => true + case _: AbstractStepsWithTail[_, _, _] => true + case _ => false + } + + trait SpecCheck { + def check[X](x: X): Boolean + def msg[X](x: X): String + def assert(x: Any): Unit = + if(!check(x)) assertTrue(msg(x), false) + } + object SpecCheck { + def apply(f: Any => Boolean, err: Any => String = (_ => "SpecCheck failed")) = new SpecCheck { + def check[X](x: X): Boolean = f(x) + def msg[X](x: X): String = err(x) + } + } + + def _eh_[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + } + + def IFFY[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(isAcc(x)) + } + + def Okay[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(!isAcc(x)) + assertTrue(isLin(x)) + } + + def Fine[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(!isAcc(x)) + } + + def good[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + assertTrue(!isAcc(x)) + assertTrue(!isLin(x)) + } + + def Tell[X](x: => X)(implicit correctSpec: SpecCheck): Unit = { + println(x.getClass.getName + " -> " + isAcc(x)) + assertTrue(x.isInstanceOf[Stepper[_]]) + correctSpec.assert(x) + } + + @Test + def comprehensivelyGeneric(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[AnyStepper[_]]) + + // 
Collection section + Okay( co.Iterator[String]("salmon").buffered.stepper ) + good( co.IndexedSeq[String]("salmon").stepper ) + Okay( co.Iterable[String]("salmon").stepper ) + Okay( co.Iterable[String]("salmon").view.stepper ) + Okay( co.Iterator[String]("salmon").stepper ) + Okay( co.LinearSeq[String]("salmon").stepper ) + Okay( co.Map[String, String]("fish" -> "salmon").stepper ) + Okay( co.Map[String, String]("fish" -> "salmon").keyStepper ) + Okay( co.Map[String, String]("fish" -> "salmon").valueStepper ) + Okay( co.Seq[String]("salmon").stepper ) + Okay( co.Seq[String]("salmon").view.stepper ) + Okay( co.Set[String]("salmon").stepper ) + Okay( co.SortedMap[String, String]("fish" -> "salmon").stepper ) + Okay( co.SortedMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( co.SortedMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( co.SortedSet[String]("salmon").stepper ) + IFFY( co.Traversable[String]("salmon").accumulate.stepper ) + IFFY( (co.Iterator[String]("salmon"): co.TraversableOnce[String]).accumulate.stepper ) + IFFY( co.Traversable[String]("salmon").view.accumulate.stepper ) + + // Immutable section + Okay( ci.::("salmon", Nil).stepper ) + Okay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).stepper ) + Okay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).keyStepper ) + Okay( (ci.HashMap[String, String]("fish" -> "salmon"): ci.AbstractMap[String, String]).valueStepper ) + good( ci.HashMap[String, String]("fish" -> "salmon").stepper ) + good( ci.HashMap[String, String]("fish" -> "salmon").keyStepper ) + good( ci.HashMap[String, String]("fish" -> "salmon").valueStepper ) + good( ci.HashSet[String]("salmon").stepper ) + good( ci.IndexedSeq[String]("salmon").stepper ) + Okay( ci.IntMap[String](123456 -> "salmon").stepper ) + Okay( ci.IntMap[String](123456 -> "salmon").valueStepper ) + Okay( ci.Iterable[String]("salmon").stepper ) + Okay( 
ci.LinearSeq[String]("salmon").stepper ) + Okay( ci.List[String]("salmon").stepper ) + Okay( ci.ListMap[String, String]("fish" -> "salmon").stepper ) + Okay( ci.ListMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.ListMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.ListSet[String]("salmon").stepper ) + Okay( ci.LongMap[String](9876543210L -> "salmon").stepper ) + Okay( ci.LongMap[String](9876543210L -> "salmon").valueStepper ) + Okay( ci.Map[String, String]("fish" -> "salmon").stepper ) + Okay( ci.Map[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.Map[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.Queue[String]("salmon").stepper ) + Okay( ci.Seq[String]("salmon").stepper ) + Okay( ci.Set[String]("salmon").stepper ) + Okay( ci.SortedMap[String, String]("fish" -> "salmon").stepper ) + Okay( ci.SortedMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.SortedMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.SortedSet[String]("salmon").stepper ) + Okay( ci.Stack[String]("salmon").stepper ) + Okay( ci.Stream[String]("salmon").stepper ) + _eh_( ci.Stream[String]("salmon").view.stepper ) + IFFY( ci.Traversable[String]("salmon").accumulate.stepper ) + Okay( ci.TreeMap[String, String]("fish" -> "salmon").stepper ) + Okay( ci.TreeMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( ci.TreeMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( ci.TreeSet[String]("salmon").stepper ) + good( ci.Vector[String]("salmon").stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[String]("salmon"): cm.AbstractBuffer[String]).stepper ) + Okay( (cm.PriorityQueue[String]("salmon"): cm.AbstractIterable[String]).stepper ) + Okay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).stepper ) + Okay( (cm.HashMap[String, String]("fish" -> "salmon"): cm.AbstractMap[String, String]).keyStepper ) + Okay( (cm.HashMap[String, String]("fish" -> "salmon"): 
cm.AbstractMap[String, String]).valueStepper ) + Okay( (cm.ArrayBuffer[String]("salmon"): cm.AbstractSeq[String]).stepper ) + Okay( (cm.HashSet[String]("salmon"): cm.AbstractSet[String]).stepper ) + Okay( cm.AnyRefMap[String,String]("fish" -> "salmon").stepper ) + Okay( cm.AnyRefMap[String,String]("fish" -> "salmon").keyStepper ) + Okay( cm.AnyRefMap[String,String]("fish" -> "salmon").valueStepper ) + good( cm.ArrayBuffer[String]("salmon").stepper ) + good( ((Array("salmon"): cm.WrappedArray[String]): cm.ArrayLike[String, cm.WrappedArray[String]]).stepper ) + good( (Array("salmon"): cm.ArrayOps[String]).stepper ) + good( cm.ArraySeq[String]("salmon").stepper ) + _eh_( cm.ArrayStack[String]("salmon").stepper ) + Okay( (cm.ArrayBuffer[String]("salmon"): cm.Buffer[String]).stepper ) + good( cm.HashMap[String, String]("fish" -> "salmon").stepper ) + good( cm.HashMap[String, String]("fish" -> "salmon").keyStepper ) + good( cm.HashMap[String, String]("fish" -> "salmon").valueStepper ) + good( cm.HashSet[String]("salmon").stepper ) + good( cm.IndexedSeq[String]("salmon").stepper ) + good( cm.IndexedSeq[String]("salmon").view.stepper ) + Okay( cm.Iterable[String]("salmon").stepper ) + Okay( cm.LinearSeq[String]("salmon").stepper ) + good( cm.LinkedHashMap[String, String]("fish" -> "salmon").stepper ) + good( cm.LinkedHashMap[String, String]("fish" -> "salmon").keyStepper ) + good( cm.LinkedHashMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.LinkedHashSet[String]("salmon").stepper ) + Okay( cm.ListBuffer[String]("salmon").stepper ) + Okay( cm.ListMap[String, String]("fish" -> "salmon").stepper ) + Okay( cm.ListMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.ListMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.LongMap[String](9876543210L -> "salmon").stepper ) + Okay( cm.LongMap[String](9876543210L -> "salmon").valueStepper ) + Okay( cm.Map[String, String]("fish" -> "salmon").stepper ) + Okay( cm.Map[String, String]("fish" -> 
"salmon").keyStepper ) + Okay( cm.Map[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.MutableList[String]("salmon").stepper ) + Okay( cm.OpenHashMap[String, String]("fish" -> "salmon").stepper ) + Okay( cm.OpenHashMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.OpenHashMap[String, String]("fish" -> "salmon").valueStepper ) + Okay( cm.PriorityQueue[String]("salmon").stepper ) + Okay( cm.Queue[String]("salmon").stepper ) + good( cm.ResizableArray[String]("salmon").stepper ) + Okay( cm.Seq[String]("salmon").stepper ) + Okay( cm.Set[String]("salmon").stepper ) + Okay( cm.SortedSet[String]("salmon").stepper ) + Okay( cm.Stack[String]("salmon").stepper ) + IFFY( cm.Traversable[String]("salmon").accumulate.stepper ) + Okay( cm.TreeSet[String]("salmon").stepper ) + Okay( cm.UnrolledBuffer[String]("salmon").stepper ) + Okay( cm.WeakHashMap[String, String]("fish" -> "salmon").stepper ) + Okay( cm.WeakHashMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cm.WeakHashMap[String, String]("fish" -> "salmon").valueStepper ) + good( (Array("salmon"): cm.WrappedArray[String]).stepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[String, String]("fish" -> "salmon").stepper ) + Okay( cc.TrieMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( cc.TrieMap[String, String]("fish" -> "salmon").keyStepper ) + Okay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).stepper ) + Okay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).keyStepper ) + Okay( (cc.TrieMap[String, String]("fish" -> "salmon"): cc.Map[String, String]).valueStepper ) + } + + @Test + def comprehensivelyDouble(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[DoubleStepper]) + //Double-specific tests + + // Collection section + Okay( co.Iterator[Double](3.14159).buffered.stepper ) + good( co.IndexedSeq[Double](3.14159).stepper ) + Okay( co.Iterable[Double](3.14159).stepper ) + Okay( 
co.Iterable[Double](3.14159).view.stepper ) + Okay( co.Iterator[Double](3.14159).stepper ) + Okay( co.LinearSeq[Double](3.14159).stepper ) + Okay( co.Map[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( co.Map[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( co.Seq[Double](3.14159).stepper ) + Okay( co.Seq[Double](3.14159).view.stepper ) + Okay( co.Set[Double](3.14159).stepper ) + Okay( co.SortedMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( co.SortedMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( co.SortedSet[Double](3.14159).stepper ) + IFFY( co.Traversable[Double](3.14159).accumulate.stepper ) + IFFY( (co.Iterator[Double](3.14159): co.TraversableOnce[Double]).accumulate.stepper ) + IFFY( co.Traversable[Double](3.14159).view.accumulate.stepper ) + + // Immutable section + Okay( ci.::(3.14159, Nil).stepper ) + Okay( (ci.HashMap[Double, Double](2.718281828 -> 3.14159): ci.AbstractMap[Double, Double]).keyStepper ) + Okay( (ci.HashMap[Double, Double](2.718281828 -> 3.14159): ci.AbstractMap[Double, Double]).valueStepper ) + good( ci.HashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + good( ci.HashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + good( ci.HashSet[Double](3.14159).stepper ) + good( ci.IndexedSeq[Double](3.14159).stepper ) + Okay( ci.IntMap[Double](123456 -> 3.14159).valueStepper ) + Okay( ci.Iterable[Double](3.14159).stepper ) + Okay( ci.LinearSeq[Double](3.14159).stepper ) + Okay( ci.List[Double](3.14159).stepper ) + Okay( ci.ListMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.ListMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.ListSet[Double](3.14159).stepper ) + Okay( ci.LongMap[Double](9876543210L -> 3.14159).valueStepper ) + Okay( ci.Map[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.Map[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.Queue[Double](3.14159).stepper ) + Okay( 
ci.Seq[Double](3.14159).stepper ) + Okay( ci.Set[Double](3.14159).stepper ) + Okay( ci.SortedMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.SortedMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.SortedSet[Double](3.14159).stepper ) + Okay( ci.Stack[Double](3.14159).stepper ) + Okay( ci.Stream[Double](3.14159).stepper ) + _eh_( ci.Stream[Double](3.14159).view.stepper ) + IFFY( ci.Traversable[Double](3.14159).accumulate.stepper ) + Okay( ci.TreeMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( ci.TreeMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( ci.TreeSet[Double](3.14159).stepper ) + good( ci.Vector[Double](3.14159).stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[Double](3.14159): cm.AbstractBuffer[Double]).stepper ) + Okay( (cm.PriorityQueue[Double](3.14159): cm.AbstractIterable[Double]).stepper ) + Okay( (cm.HashMap[Double, Double](2.718281828 -> 3.14159): cm.AbstractMap[Double, Double]).keyStepper ) + Okay( (cm.HashMap[Double, Double](2.718281828 -> 3.14159): cm.AbstractMap[Double, Double]).valueStepper ) + Okay( (cm.ArrayBuffer[Double](3.14159): cm.AbstractSeq[Double]).stepper ) + Okay( (cm.HashSet[Double](3.14159): cm.AbstractSet[Double]).stepper ) + Okay( cm.AnyRefMap[String,Double]("fish" -> 3.14159).valueStepper ) + good( cm.ArrayBuffer[Double](3.14159).stepper ) + good( ((Array(3.14159): cm.WrappedArray[Double]): cm.ArrayLike[Double, cm.WrappedArray[Double]]).stepper ) + good( (Array(3.14159): cm.ArrayOps[Double]).stepper ) + good( cm.ArraySeq[Double](3.14159).stepper ) + _eh_( cm.ArrayStack[Double](3.14159).stepper ) + Okay( (cm.ArrayBuffer[Double](3.14159): cm.Buffer[Double]).stepper ) + good( cm.HashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + good( cm.HashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + good( cm.HashSet[Double](3.14159).stepper ) + good( cm.IndexedSeq[Double](3.14159).stepper ) + good( 
cm.IndexedSeq[Double](3.14159).view.stepper ) + Okay( cm.Iterable[Double](3.14159).stepper ) + Okay( cm.LinearSeq[Double](3.14159).stepper ) + good( cm.LinkedHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + good( cm.LinkedHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.LinkedHashSet[Double](3.14159).stepper ) + Okay( cm.ListBuffer[Double](3.14159).stepper ) + Okay( cm.ListMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.ListMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.LongMap[Double](9876543210L -> 3.14159).valueStepper ) + Okay( cm.Map[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.Map[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.MutableList[Double](3.14159).stepper ) + Okay( cm.OpenHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.OpenHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( cm.PriorityQueue[Double](3.14159).stepper ) + Okay( cm.Queue[Double](3.14159).stepper ) + good( cm.ResizableArray[Double](3.14159).stepper ) + Okay( cm.Seq[Double](3.14159).stepper ) + Okay( cm.Set[Double](3.14159).stepper ) + Okay( cm.SortedSet[Double](3.14159).stepper ) + Okay( cm.Stack[Double](3.14159).stepper ) + IFFY( cm.Traversable[Double](3.14159).accumulate.stepper ) + Okay( cm.TreeSet[Double](3.14159).stepper ) + Okay( cm.UnrolledBuffer[Double](3.14159).stepper ) + Okay( cm.WeakHashMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cm.WeakHashMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + good( (Array(3.14159): cm.WrappedArray[Double]).stepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[Double, Double](2.718281828 -> 3.14159).keyStepper ) + Okay( cc.TrieMap[Double, Double](2.718281828 -> 3.14159).valueStepper ) + Okay( (cc.TrieMap[Double, Double](2.718281828 -> 3.14159): cc.Map[Double, Double]).keyStepper ) + Okay( (cc.TrieMap[Double, 
Double](2.718281828 -> 3.14159): cc.Map[Double, Double]).valueStepper ) + } + + @Test + def comprehensivelyInt(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper") + + // Int-specific tests + good( co.BitSet(42).stepper ) + good( ci.BitSet(42).stepper ) + good( ci.NumericRange(123456, 123458, 1).stepper ) + good( cm.BitSet(42).stepper ) + good( (1 until 2).stepper ) + Okay( ci.IntMap[String](123456 -> "salmon").keyStepper ) + Okay( ci.IntMap[Double](123456 -> 3.14159).keyStepper ) + Okay( ci.IntMap[Long](123456 -> 0x123456789L).keyStepper ) + + // Collection section + Okay( co.Iterator[Int](654321).buffered.stepper ) + good( co.IndexedSeq[Int](654321).stepper ) + Okay( co.Iterable[Int](654321).stepper ) + Okay( co.Iterable[Int](654321).view.stepper ) + Okay( co.Iterator[Int](654321).stepper ) + Okay( co.LinearSeq[Int](654321).stepper ) + Okay( co.Map[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( co.Map[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( co.Seq[Int](654321).stepper ) + Okay( co.Seq[Int](654321).view.stepper ) + Okay( co.Set[Int](654321).stepper ) + Okay( co.SortedMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( co.SortedMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( co.SortedSet[Int](654321).stepper ) + IFFY( co.Traversable[Int](654321).accumulate.stepper ) + IFFY( (co.Iterator[Int](654321): co.TraversableOnce[Int]).accumulate.stepper ) + IFFY( co.Traversable[Int](654321).view.accumulate.stepper ) + + // Immutable section + Okay( ci.::(654321, Nil).stepper ) + Okay( (ci.HashMap[Int, Int](0xDEEDED -> 654321): ci.AbstractMap[Int, Int]).keyStepper ) + Okay( (ci.HashMap[Int, Int](0xDEEDED -> 654321): ci.AbstractMap[Int, Int]).valueStepper ) + good( ci.HashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + good( ci.HashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + good( ci.HashSet[Int](654321).stepper ) + good( ci.IndexedSeq[Int](654321).stepper ) + Okay( 
ci.IntMap[Int](123456 -> 654321).keyStepper ) + Okay( ci.IntMap[Int](123456 -> 654321).valueStepper ) + Okay( ci.Iterable[Int](654321).stepper ) + Okay( ci.LinearSeq[Int](654321).stepper ) + Okay( ci.List[Int](654321).stepper ) + Okay( ci.ListMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.ListMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.ListSet[Int](654321).stepper ) + Okay( ci.LongMap[Int](9876543210L -> 654321).valueStepper ) + Okay( ci.Map[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.Map[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.Queue[Int](654321).stepper ) + Okay( ci.Seq[Int](654321).stepper ) + Okay( ci.Set[Int](654321).stepper ) + Okay( ci.SortedMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.SortedMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.SortedSet[Int](654321).stepper ) + Okay( ci.Stack[Int](654321).stepper ) + Okay( ci.Stream[Int](654321).stepper ) + _eh_( ci.Stream[Int](654321).view.stepper ) + IFFY( ci.Traversable[Int](654321).accumulate.stepper ) + Okay( ci.TreeMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( ci.TreeMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( ci.TreeSet[Int](654321).stepper ) + good( ci.Vector[Int](654321).stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[Int](654321): cm.AbstractBuffer[Int]).stepper ) + Okay( (cm.PriorityQueue[Int](654321): cm.AbstractIterable[Int]).stepper ) + Okay( (cm.HashMap[Int, Int](0xDEEDED -> 654321): cm.AbstractMap[Int, Int]).keyStepper ) + Okay( (cm.HashMap[Int, Int](0xDEEDED -> 654321): cm.AbstractMap[Int, Int]).valueStepper ) + Okay( (cm.ArrayBuffer[Int](654321): cm.AbstractSeq[Int]).stepper ) + Okay( (cm.HashSet[Int](654321): cm.AbstractSet[Int]).stepper ) + Okay( cm.AnyRefMap[String, Int]("fish" -> 654321).valueStepper ) + good( cm.ArrayBuffer[Int](654321).stepper ) + good( ((Array(654321): cm.WrappedArray[Int]): cm.ArrayLike[Int, cm.WrappedArray[Int]]).stepper ) + good( (Array(654321): 
cm.ArrayOps[Int]).stepper ) + good( cm.ArraySeq[Int](654321).stepper ) + _eh_( cm.ArrayStack[Int](654321).stepper ) + Okay( (cm.ArrayBuffer[Int](654321): cm.Buffer[Int]).stepper ) + good( cm.HashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + good( cm.HashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + good( cm.HashSet[Int](654321).stepper ) + good( cm.IndexedSeq[Int](654321).stepper ) + good( cm.IndexedSeq[Int](654321).view.stepper ) + Okay( cm.Iterable[Int](654321).stepper ) + Okay( cm.LinearSeq[Int](654321).stepper ) + good( cm.LinkedHashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + good( cm.LinkedHashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.LinkedHashSet[Int](654321).stepper ) + Okay( cm.ListBuffer[Int](654321).stepper ) + Okay( cm.ListMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.ListMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.LongMap[Int](9876543210L -> 654321).valueStepper ) + Okay( cm.Map[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.Map[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.MutableList[Int](654321).stepper ) + Okay( cm.OpenHashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.OpenHashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( cm.PriorityQueue[Int](654321).stepper ) + Okay( cm.Queue[Int](654321).stepper ) + good( cm.ResizableArray[Int](654321).stepper ) + Okay( cm.Seq[Int](654321).stepper ) + Okay( cm.Set[Int](654321).stepper ) + Okay( cm.SortedSet[Int](654321).stepper ) + Okay( cm.Stack[Int](654321).stepper ) + IFFY( cm.Traversable[Int](654321).accumulate.stepper ) + Okay( cm.TreeSet[Int](654321).stepper ) + Okay( cm.UnrolledBuffer[Int](654321).stepper ) + Okay( cm.WeakHashMap[Int, Int](0xDEEDED -> 654321).keyStepper ) + Okay( cm.WeakHashMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + good( (Array(654321): cm.WrappedArray[Int]).stepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[Int, Int](0xDEEDED -> 
654321).keyStepper ) + Okay( cc.TrieMap[Int, Int](0xDEEDED -> 654321).valueStepper ) + Okay( (cc.TrieMap[Int, Int](0xDEEDED -> 654321): cc.Map[Int, Int]).keyStepper ) + Okay( (cc.TrieMap[Int, Int](0xDEEDED -> 654321): cc.Map[Int, Int]).valueStepper ) + } + + @Test + def shortWidening(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper") + + good( Array[Short](654321.toShort).stepper ) + good( (Array[Short](654321.toShort): cm.WrappedArray[Short]).stepper ) + + //TODO: None of these currently work because there are no native Stepper implementations: + + //good( ci.NumericRange(123456.toShort, 123458.toShort, 1.toShort).stepper ) + //good( ((Array[Short](654321.toShort): cm.WrappedArray[Short]): cm.ArrayLike[Short, cm.WrappedArray[Short]]).stepper ) + //good( (Array[Short](654321.toShort): cm.ArrayOps[Short]).stepper ) + //good( cm.ResizableArray[Short](654321.toShort).stepper ) + } + + @Test + def comprehensivelyLong(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[LongStepper]) + + // Long-specific tests + good( ci.NumericRange(9876543210L, 9876543212L, 1L).stepper ) + Okay( ci.LongMap[String](9876543210L -> "salmon").keyStepper ) + Okay( cm.LongMap[String](9876543210L -> "salmon").keyStepper ) + Okay( ci.LongMap[Double](9876543210L -> 3.14159).keyStepper ) + Okay( cm.LongMap[Double](9876543210L -> 3.14159).keyStepper ) + Okay( ci.LongMap[Int](9876543210L -> 654321).keyStepper ) + Okay( cm.LongMap[Int](9876543210L -> 654321).keyStepper ) + + // Collection section + Okay( co.Iterator[Long](0x123456789L).buffered.stepper ) + good( co.IndexedSeq[Long](0x123456789L).stepper ) + Okay( co.Iterable[Long](0x123456789L).stepper ) + Okay( co.Iterable[Long](0x123456789L).view.stepper ) + Okay( co.Iterator[Long](0x123456789L).stepper ) + Okay( co.LinearSeq[Long](0x123456789L).stepper ) + Okay( co.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( co.Map[Long, Long](1234567654321L -> 
0x123456789L).valueStepper ) + Okay( co.Seq[Long](0x123456789L).stepper ) + Okay( co.Seq[Long](0x123456789L).view.stepper ) + Okay( co.Set[Long](0x123456789L).stepper ) + Okay( co.SortedMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( co.SortedMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( co.SortedSet[Long](0x123456789L).stepper ) + IFFY( co.Traversable[Long](0x123456789L).accumulate.stepper ) + IFFY( (co.Iterator[Long](0x123456789L): co.TraversableOnce[Long]).accumulate.stepper ) + IFFY( co.Traversable[Long](0x123456789L).view.accumulate.stepper ) + + // Immutable section + Okay( ci.::(0x123456789L, Nil).stepper ) + Okay( (ci.HashMap[Long, Long](1234567654321L -> 0x123456789L): ci.AbstractMap[Long, Long]).keyStepper ) + Okay( (ci.HashMap[Long, Long](1234567654321L -> 0x123456789L): ci.AbstractMap[Long, Long]).valueStepper ) + good( ci.HashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + good( ci.HashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + good( ci.HashSet[Long](0x123456789L).stepper ) + good( ci.IndexedSeq[Long](0x123456789L).stepper ) + Okay( ci.IntMap[Long](123456 -> 0x123456789L).valueStepper ) + Okay( ci.Iterable[Long](0x123456789L).stepper ) + Okay( ci.LinearSeq[Long](0x123456789L).stepper ) + Okay( ci.List[Long](0x123456789L).stepper ) + Okay( ci.ListMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.ListMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.ListSet[Long](0x123456789L).stepper ) + Okay( ci.LongMap[Long](9876543210L -> 0x123456789L).keyStepper ) + Okay( ci.LongMap[Long](9876543210L -> 0x123456789L).valueStepper ) + Okay( ci.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.Queue[Long](0x123456789L).stepper ) + Okay( ci.Seq[Long](0x123456789L).stepper ) + Okay( ci.Set[Long](0x123456789L).stepper ) + Okay( ci.SortedMap[Long, 
Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.SortedMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.SortedSet[Long](0x123456789L).stepper ) + Okay( ci.Stack[Long](0x123456789L).stepper ) + Okay( ci.Stream[Long](0x123456789L).stepper ) + _eh_( ci.Stream[Long](0x123456789L).view.stepper ) + IFFY( ci.Traversable[Long](0x123456789L).accumulate.stepper ) + Okay( ci.TreeMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( ci.TreeMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( ci.TreeSet[Long](0x123456789L).stepper ) + good( ci.Vector[Long](0x123456789L).stepper ) + + // Mutable section + Okay( (cm.ArrayBuffer[Long](0x123456789L): cm.AbstractBuffer[Long]).stepper ) + Okay( (cm.PriorityQueue[Long](0x123456789L): cm.AbstractIterable[Long]).stepper ) + Okay( (cm.HashMap[Long, Long](1234567654321L -> 0x123456789L): cm.AbstractMap[Long, Long]).keyStepper ) + Okay( (cm.HashMap[Long, Long](1234567654321L -> 0x123456789L): cm.AbstractMap[Long, Long]).valueStepper ) + Okay( (cm.ArrayBuffer[Long](0x123456789L): cm.AbstractSeq[Long]).stepper ) + Okay( (cm.HashSet[Long](0x123456789L): cm.AbstractSet[Long]).stepper ) + Okay( cm.AnyRefMap[String,Long]("fish" -> 0x123456789L).valueStepper ) + good( cm.ArrayBuffer[Long](0x123456789L).stepper ) + good( ((Array(0x123456789L): cm.WrappedArray[Long]): cm.ArrayLike[Long, cm.WrappedArray[Long]]).stepper ) + good( (Array(0x123456789L): cm.ArrayOps[Long]).stepper ) + good( cm.ArraySeq[Long](0x123456789L).stepper ) + _eh_( cm.ArrayStack[Long](0x123456789L).stepper ) + Okay( (cm.ArrayBuffer[Long](0x123456789L): cm.Buffer[Long]).stepper ) + good( cm.HashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + good( cm.HashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + good( cm.HashSet[Long](0x123456789L).stepper ) + good( cm.IndexedSeq[Long](0x123456789L).stepper ) + good( cm.IndexedSeq[Long](0x123456789L).view.stepper ) + Okay( 
cm.Iterable[Long](0x123456789L).stepper ) + Okay( cm.LinearSeq[Long](0x123456789L).stepper ) + good( cm.LinkedHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + good( cm.LinkedHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.LinkedHashSet[Long](0x123456789L).stepper ) + Okay( cm.ListBuffer[Long](0x123456789L).stepper ) + Okay( cm.ListMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.ListMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.LongMap[Long](9876543210L -> 0x123456789L).keyStepper ) + Okay( cm.LongMap[Long](9876543210L -> 0x123456789L).valueStepper ) + Okay( cm.Map[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.Map[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.MutableList[Long](0x123456789L).stepper ) + Okay( cm.OpenHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.OpenHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( cm.PriorityQueue[Long](0x123456789L).stepper ) + Okay( cm.Queue[Long](0x123456789L).stepper ) + good( cm.ResizableArray[Long](0x123456789L).stepper ) + Okay( cm.Seq[Long](0x123456789L).stepper ) + Okay( cm.Set[Long](0x123456789L).stepper ) + Okay( cm.SortedSet[Long](0x123456789L).stepper ) + Okay( cm.Stack[Long](0x123456789L).stepper ) + IFFY( cm.Traversable[Long](0x123456789L).accumulate.stepper ) + Okay( cm.TreeSet[Long](0x123456789L).stepper ) + Okay( cm.UnrolledBuffer[Long](0x123456789L).stepper ) + Okay( cm.WeakHashMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cm.WeakHashMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + good( (Array(0x123456789L): cm.WrappedArray[Long]).stepper ) + + // Java 6 converters section + + // Concurrent section + Okay( cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L).keyStepper ) + Okay( cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L).valueStepper ) + Okay( (cc.TrieMap[Long, 
Long](1234567654321L -> 0x123456789L): cc.Map[Long, Long]).keyStepper ) + Okay( (cc.TrieMap[Long, Long](1234567654321L -> 0x123456789L): cc.Map[Long, Long]).valueStepper ) + } + + @Test + def comprehensivelySpecific(): Unit = { + implicit val spec = SpecCheck(_.isInstanceOf[IntStepper], x => s"$x should be an IntStepper") + + good( ci.NumericRange(277: Short, 279: Short, 1: Short).stepper ) + good( ("salmon": ci.StringOps).stepper ) + good( ("salmon": ci.WrappedString).stepper ) + } +} diff --git a/src/test/scala-2.13-/scala/compat/java8/StepperTest.scala b/src/test/scala-2.13-/scala/compat/java8/StepperTest.scala new file mode 100644 index 0000000..7bbbcd9 --- /dev/null +++ b/src/test/scala-2.13-/scala/compat/java8/StepperTest.scala @@ -0,0 +1,319 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import org.junit.Test +import org.junit.Assert._ + +import java.util.Spliterator + +import collectionImpl._ + + +class IncStepperA(private val size0: Long) extends NextStepper[Int] { + if (size0 < 0) throw new IllegalArgumentException("Size must be >= 0L") + private var i = 0L + def characteristics = Stepper.Sized | Stepper.SubSized | Stepper.Ordered + def knownSize = math.max(0L, size0 - i) + def hasStep = i < size0 + def nextStep() = { i += 1; (i - 1).toInt } + def substep() = if (knownSize <= 1) null else { + val sub = new IncStepperA(size0 - (size0 - i)/2) + sub.i = i + i = sub.size0 + sub + } +} + +class IncStepperB(private val size0: Long) extends TryStepper[Int] { + if (size0 < 0) throw new IllegalArgumentException("Size must be >= 0L") + protected var myCache: Int = 0 + private var i = 0L + def characteristics = Stepper.Sized | Stepper.SubSized | Stepper.Ordered + def knownUncachedSize = math.max(0L, size0 - i) + protected def tryUncached(f: Int => Unit): Boolean = if (i >= size0) false else { f(i.toInt); i += 1; true } + def substep() = if (knownSize <= 1) null else { + val sub = new IncStepperB(size0 - (size0 - i)/2) + sub.i = i + i = sub.size0 + sub + } +} + +class IncSpliterator(private val size0: Long) extends Spliterator.OfInt { + if (size0 < 0) throw new IllegalArgumentException("Size must be >= 0L") + private var i = 0L + def characteristics() = Stepper.Sized | Stepper.SubSized | Stepper.Ordered + def estimateSize() = math.max(0L, size0 - i) + def tryAdvance(f: java.util.function.IntConsumer): Boolean = if (i >= size0) false else { f.accept(i.toInt); i += 1; true } + def trySplit(): Spliterator.OfInt = if (i+1 >= size0) null else { + val sub = new IncSpliterator(size0 - (size0 - i)/2) + sub.i = i + i = sub.size0 + sub + } + override def forEachRemaining(f: java.util.function.IntConsumer): Unit = { while (i < size0) { f.accept(i.toInt); i += 1 } } +} + +class MappingStepper[@specialized (Double, Int, Long) A, 
@specialized(Double, Int, Long) B](underlying: Stepper[A], mapping: A => B) extends Stepper[B] { + def characteristics = underlying.characteristics + def knownSize = underlying.knownSize + def hasStep = underlying.hasStep + def nextStep() = mapping(underlying.nextStep()) + def tryStep(f: B => Unit): Boolean = underlying.tryStep(a => f(mapping(a))) + override def foreach(f: B => Unit): Unit = { underlying.foreach(a => f(mapping(a))) } + def substep() = { + val undersub = underlying.substep() + if (undersub == null) null + else new MappingStepper(undersub, mapping) + } + def spliterator: Spliterator[B] = new MappingSpliterator[A, B](underlying.spliterator, mapping) +} + +class MappingSpliterator[A, B](private val underlying: Spliterator[A], mapping: A => B) extends Spliterator[B] { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.Consumer[_ >: B]): Boolean = underlying.tryAdvance(new java.util.function.Consumer[A]{ def accept(a: A): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator[B] = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new MappingSpliterator(undersplit, mapping) + } +} +class IntToGenericSpliterator[A](private val underlying: Spliterator.OfInt, mapping: Int => A) extends Spliterator[A] { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.Consumer[_ >: A]): Boolean = underlying.tryAdvance(new java.util.function.IntConsumer{ def accept(a: Int): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator[A] = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new IntToGenericSpliterator[A](undersplit, mapping) + } +} +class IntToDoubleSpliterator(private val underlying: Spliterator.OfInt, mapping: Int => Double) extends Spliterator.OfDouble { + def characteristics = underlying.characteristics + def 
estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.DoubleConsumer): Boolean = underlying.tryAdvance(new java.util.function.IntConsumer{ def accept(a: Int): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator.OfDouble = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new IntToDoubleSpliterator(undersplit, mapping) + } +} +class IntToLongSpliterator(private val underlying: Spliterator.OfInt, mapping: Int => Long) extends Spliterator.OfLong { + def characteristics = underlying.characteristics + def estimateSize() = underlying.estimateSize() + def tryAdvance(f: java.util.function.LongConsumer): Boolean = underlying.tryAdvance(new java.util.function.IntConsumer{ def accept(a: Int): Unit = { f.accept(mapping(a)) } }) + def trySplit(): Spliterator.OfLong = { + val undersplit = underlying.trySplit() + if (undersplit == null) null + else new IntToLongSpliterator(undersplit, mapping) + } +} + + +class StepperTest { + def subs[Z, A, CC <: Stepper[A]](zero: Z)(s: Stepper[A])(f: Stepper[A] => Z, op: (Z, Z) => Z): Z = { + val ss = s.substep() + if (ss == null) op(zero, f(s)) + else { + val left = subs(zero)(ss)(f, op) + subs(left)(s)(f, op) + } + } + + val sizes = Vector(0, 1, 2, 4, 15, 17, 2512) + def sources: Vector[(Int, Stepper[Int])] = sizes.flatMap{ i => + Vector( + i -> new IncStepperA(i), + i -> new IncStepperB(i), + i -> Stepper.ofSpliterator(new IncSpliterator(i)), + i -> new MappingStepper[Int,Int](new IncStepperA(i), x => x), + i -> new MappingStepper[Long, Int](Stepper.ofSpliterator(new IntToLongSpliterator(new IncSpliterator(i), _.toLong)), _.toInt), + i -> new MappingStepper[Double, Int](Stepper.ofSpliterator(new IntToDoubleSpliterator(new IncSpliterator(i), _.toDouble)), _.toInt), + i -> new MappingStepper[String, Int](Stepper.ofSpliterator(new IntToGenericSpliterator[String](new IncSpliterator(i), _.toString)), _.toInt) + ) ++ + { + // Implicitly converted instead of explicitly + import 
SpliteratorConverters._ + Vector[(Int, Stepper[Int])]( + i -> (new IncSpliterator(i)).stepper, + i -> new MappingStepper[Long, Int]((new IntToLongSpliterator(new IncSpliterator(i), _.toLong)).stepper, _.toInt), + i -> new MappingStepper[Double, Int]((new IntToDoubleSpliterator(new IncSpliterator(i), _.toDouble)).stepper, _.toInt), + i -> new MappingStepper[String, Int]((new IntToGenericSpliterator[String](new IncSpliterator(i), _.toString)).stepper, _.toInt) + ) + } + } + + @Test + def stepping(): Unit = { + sources.foreach{ case (i, s) => assert((0 until i).forall{ j => s.hasStep && s.nextStep == j } && !s.hasStep) } + sources.foreach{ case (i, s) => + val set = collection.mutable.BitSet.empty + subs(0)(s)( + { x => + while (x.hasStep) { val y = x.nextStep; assert(!(set contains y)); set += y } + 0 + }, + _ + _ + ) + assert((0 until i).toSet == set) + } + } + + @Test + def trying(): Unit = { + sources.foreach{ case (i,s) => + val set = collection.mutable.BitSet.empty + while (s.tryStep{ y => assert(!(set contains y)); set += y }) {} + assert((0 until i).toSet == set) + } + sources.foreach{ case (i,s) => + val set = collection.mutable.BitSet.empty + subs(0)(s)( + { x => + while(x.tryStep{ y => assert(!(set contains y)); set += y }) {} + 0 + }, + _ + _ + ) + assertTrue(s.getClass.getName + s" said [0, $i) was " + set.mkString("{", " ", "}"), (0 until i).toSet == set) + } + } + + @Test + def substepping(): Unit = { + sources.foreach{ case (i,s) => + val ss = s.substep + assertEquals(ss == null, i < 2) + if (ss != null) { + assertTrue(s.hasStep) + assertTrue(ss.hasStep) + val c1 = s.count + val c2 = ss.count + assertEquals(s"$i != $c1 + $c2 from ${s.getClass.getName}", i, c1 + c2) + } + else assertEquals(i, s.count) + } + } + + @Test + def characteristically(): Unit = { + val expected = Stepper.Sized | Stepper.SubSized | Stepper.Ordered + sources.foreach{ case (_,s) => assertEquals(s.characteristics, expected)} + sources.foreach{ case (_,s) => subs(0)(s)(x => { 
assertEquals(x.characteristics, expected); 0 }, _ + _) } + } + + @Test + def knownSizes(): Unit = { + sources.foreach{ case (i,s) => assertEquals(i.toLong, s.knownSize) } + sources.foreach{ case (i,s) => if (i > 0) subs(0)(s)(x => { assertEquals(x.knownSize, 1L); 0 }, _ + _) } + } + + @Test + def count_only(): Unit = { + sources.foreach{ case (i, s) => assertEquals(i, s.count) } + sources.foreach{ case (i, s) => assertEquals(i, subs(0)(s)(_.count.toInt, _ + _)) } + } + + @Test + def count_conditionally(): Unit = { + sources.foreach{ case (i, s) => assertEquals((0 until i).count(_ % 3 == 0), s.count(_ % 3 == 0)) } + sources.foreach{ case (i, s) => assertEquals((0 until i).count(_ % 3 == 0), subs(0)(s)(_.count(_ % 3 == 0).toInt, _ + _)) } + } + + @Test + def existence(): Unit = { + sources.foreach{ case (i, s) => assert(i > 0 == s.exists(_ >= 0)) } + sources.foreach{ case (i, s) => assert(i > 16 == s.exists(_ % 17 == 16)) } + sources.foreach{ case (i, s) => assert(i > 0 == subs(false)(s)(_.exists(_ >= 0), _ || _)) } + sources.foreach{ case (i, s) => assert(i > 16 == subs(false)(s)(_.exists(_ % 17 == 16), _ || _)) } + } + + @Test + def finding(): Unit = { + for (k <- 0 until 100) { + (sources zip sources).foreach{ case ((i,s), (j,t)) => + val x = util.Random.nextInt(math.min(i,j)+3) + val a = s.find(_ == x) + val b = subs(None: Option[Int])(t)(_.find(_ == x), _ orElse _) + assertEquals(a, b) + assertEquals(a.isDefined, x < math.min(i,j)) + } + } + } + + @Test + def folding(): Unit = { + sources.foreach{ case (i,s) => assertEquals((0 until i).mkString, s.fold("")(_ + _.toString)) } + sources.foreach{ case (i,s) => assertEquals((0 until i).mkString, subs("")(s)(_.fold("")(_ + _.toString), _ + _)) } + sources.foreach{ case (i,s) => assertEquals((0 until i).map(_.toDouble).sum, s.fold(0.0)(_ + _), 1e-10) } + sources.foreach{ case (i,s) => assertEquals((0 until i).map(_.toDouble).sum, subs(0.0)(s)(_.fold(0.0)(_ + _), _ + _), 1e-10) } + } + + @Test + def foldingUntil(): 
Unit = { + def expected(i: Int) = (0 until i).scan(0)(_ + _).dropWhile(_ < 6*i).headOption.getOrElse((0 until i).sum) + sources.foreach{ case (i,s) => assertEquals(expected(i), s.foldTo(0)(_ + _)(_ >= 6*i)) } + sources.foreach{ case (_,s) => assertEquals(-1, s.foldTo(-1)(_ * _)(_ => true)) } + sources.foreach{ case (i,s) => + val ss = s.substep + val x = s.foldTo( if (ss == null) 0 else ss.foldTo(0)(_ + _)(_ >= 6*i) )(_ + _)(_ >= 6*i) + assertEquals(expected(i), x) + } + } + + @Test + def foreaching(): Unit = { + sources.foreach{ case (i,s) => + val clq = new java.util.concurrent.ConcurrentLinkedQueue[String] + s.foreach( clq add _.toString ) + assertEquals((0 until i).map(_.toString).toSet, Iterator.continually(if (!clq.isEmpty) Some(clq.poll) else None).takeWhile(_.isDefined).toSet.flatten) + } + sources.foreach{ case (i,s) => + val clq = new java.util.concurrent.ConcurrentLinkedQueue[String] + subs(())(s)(_.foreach( clq add _.toString ), (_, _) => ()) + assertEquals((0 until i).map(_.toString).toSet, Iterator.continually(if (!clq.isEmpty) Some(clq.poll) else None).takeWhile(_.isDefined).toSet.flatten) + } + } + + @Test + def reducing(): Unit = { + sources.foreach{ case (i,s) => + if (i==0) assertEquals(s.hasStep, false) + else assertEquals((0 until i).sum, s.reduce(_ + _)) + } + sources.foreach{ case (i,s) => + assertEquals((0 until i).sum, subs(0)(s)(x => if (!x.hasStep) 0 else x.reduce(_ + _), _ + _)) + } + } + + @Test + def iterating(): Unit = { + sources.foreach{ case (i, s) => assert(Iterator.range(0,i) sameElements s.iterator) } + } + + @Test + def spliterating(): Unit = { + sources.foreach{ case (i,s) => + var sum = 0 + s.spliterator.forEachRemaining(new java.util.function.Consumer[Int]{ def accept(i: Int): Unit = { sum += i } }) + assertEquals(sum, (0 until i).sum) + } + sources.foreach{ case (i,s) => + val sum = subs(0)(s)(x => { var sm = 0; x.spliterator.forEachRemaining(new java.util.function.Consumer[Int]{ def accept(i: Int): Unit = { sm += i } }); 
sm }, _ + _) + assertEquals(sum, (0 until i).sum) + } + } +} + diff --git a/src/test/scala-2.13-/scala/compat/java8/StreamConvertersTest.scala b/src/test/scala-2.13-/scala/compat/java8/StreamConvertersTest.scala new file mode 100644 index 0000000..e0baef7 --- /dev/null +++ b/src/test/scala-2.13-/scala/compat/java8/StreamConvertersTest.scala @@ -0,0 +1,319 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8 + +import scala.language.higherKinds + +import org.junit.Test +import org.junit.Assert._ + +import java.util.stream._ +import StreamConverters._ +import scala.compat.java8.collectionImpl.IntStepper +import scala.compat.java8.converterImpl.MakesStepper + +class StreamConvertersTest { + + def assertEq[A](a1: A, a2: A, s: String): Unit = { assertEquals(s, a1, a2) } // Weird order normally! 
+ def assertEq[A](a1: A, a2: A): Unit = { assertEq(a1, a2, "not equal") } + def assert(b: Boolean): Unit = { assertTrue(b) } + def assert(b: Boolean, s: String): Unit = { assertTrue(s, b) } + + def arrayO(n: Int) = (1 to n).map(_.toString).toArray + def arrayD(n: Int) = (1 to n).map(_.toDouble).toArray + def arrayI(n: Int) = (1 to n).toArray + def arrayL(n: Int) = (1 to n).map(_.toLong).toArray + + def newStream(n: Int) = java.util.Arrays.stream(arrayO(n)) + def newDoubleStream(n: Int) = java.util.Arrays.stream(arrayD(n)) + def newIntStream(n: Int) = java.util.Arrays.stream(arrayI(n)) + def newLongStream(n: Int) = java.util.Arrays.stream(arrayL(n)) + + val ns = Vector(0, 1, 2, 12, 15, 16, 17, 31, 32, 33, 151, 1298, 7159) + + @Test + def streamAccumulate(): Unit = { + for (n <- ns) { + val vecO = arrayO(n).toVector + val accO = newStream(n).parallel.accumulate + assertEq(vecO, newStream(n).accumulate.to[Vector], s"stream $n to vector") + assertEq(vecO, accO.to[Vector], s"stream $n to vector in parallel") + assertEq(vecO, accO.toArray.toVector, s"stream $n to vector via array in parallel") + assertEq(vecO, accO.iterator.toVector, s"stream $n to vector via iterator in parallel") + assertEq(vecO, accO.toList.toVector, s"stream $n to vector via list in parallel") + assert((0 until accO.size.toInt).forall(i => vecO(i) == accO(i)), s"stream $n indexed via accumulator") + assert(accO.isInstanceOf[scala.compat.java8.collectionImpl.Accumulator[_]], s"stream $n to generic accumulator") + + for (boxless <- Seq(false, true)) { + val sbox = (if (boxless) "" else "(boxed)") + val vecD = arrayD(n).toVector + val accD = + if (boxless) newDoubleStream(n).parallel.accumulate + else newDoubleStream(n).boxed.parallel.accumulatePrimitive + assertEq(vecD, newDoubleStream(n).accumulate.to[Vector], s"double stream $n to vector $sbox") + assertEq(vecD, accD.to[Vector], s"double stream $n to vector in parallel $sbox") + assertEq(vecD, accD.toArray.toVector, s"double stream $n to vector via 
array in parallel $sbox") + assertEq(vecD, accD.iterator.toVector, s"double stream $n to vector via iterator in parallel $sbox") + assertEq(vecD, accD.toList.toVector, s"double stream $n to vector via list in parallel $sbox") + assert((0 until accD.size.toInt).forall(i => vecD(i) == accD(i)), s"double stream $n indexed via accumulator $sbox") + assert(accD.isInstanceOf[scala.compat.java8.collectionImpl.DoubleAccumulator], s"double stream $n to generic accumulator $sbox") + + val vecI = arrayI(n).toVector + val accI = + if (boxless) newIntStream(n).parallel.accumulate + else newIntStream(n).boxed.parallel.accumulatePrimitive + assertEq(vecI, newIntStream(n).accumulate.to[Vector], s"int stream $n to vector $sbox") + assertEq(vecI, accI.to[Vector], s"int stream $n to vector in parallel $sbox") + assertEq(vecI, accI.toArray.toVector, s"int stream $n to vector via array in parallel $sbox") + assertEq(vecI, accI.iterator.toVector, s"int stream $n to vector via iterator in parallel $sbox") + assertEq(vecI, accI.toList.toVector, s"int stream $n to vector via list in parallel $sbox") + assert((0 until accI.size.toInt).forall(i => vecI(i) == accI(i)), s"int stream $n indexed via accumulator $sbox") + assert(accI.isInstanceOf[scala.compat.java8.collectionImpl.IntAccumulator], s"int stream $n to generic accumulator $sbox") + + val vecL = arrayL(n).toVector + val accL = + if (boxless) newLongStream(n).parallel.accumulate + else newLongStream(n).boxed.parallel.accumulatePrimitive + assertEq(vecL, newLongStream(n).accumulate.to[Vector], s"long stream $n to vector $sbox") + assertEq(vecL, accL.to[Vector], s"long stream $n to vector in parallel $sbox") + assertEq(vecL, accL.toArray.toVector, s"long stream $n to vector via array in parallel $sbox") + assertEq(vecL, accL.iterator.toVector, s"long stream $n to vector via iterator in parallel $sbox") + assertEq(vecL, accL.toList.toVector, s"long stream $n to vector via list in parallel $sbox") + assert((0 until 
accL.size.toInt).forall(i => vecL(i) == accL(i)), s"long stream $n indexed via accumulator $sbox") + assert(accL.isInstanceOf[scala.compat.java8.collectionImpl.LongAccumulator], s"long stream $n to generic accumulator $sbox") + } + } + } + + @Test + def streamToScala(): Unit = { + for (n <- ns) { + val vecO = arrayO(n).toVector + assertEq(vecO, newStream(n).toScala[Vector]) + assertEq(vecO, newStream(n).parallel.toScala[Vector]) + + val vecD = arrayD(n).toVector + assertEq(vecD, newDoubleStream(n).toScala[Vector]) + assertEq(vecD, newDoubleStream(n).parallel.toScala[Vector]) + + val vecI = arrayI(n).toVector + assertEq(vecI, newIntStream(n).toScala[Vector]) + assertEq(vecI, newIntStream(n).parallel.toScala[Vector]) + + val vecL = arrayL(n).toVector + assertEq(vecL, newLongStream(n).toScala[Vector]) + assertEq(vecL, newLongStream(n).parallel.toScala[Vector]) + } + } + + @Test + def streamUnbox(): Unit = { + assert(newDoubleStream(1).boxed.unboxed.isInstanceOf[DoubleStream]) + assert(newIntStream(1).boxed.unboxed.isInstanceOf[IntStream]) + assert(newLongStream(1).boxed.unboxed.isInstanceOf[LongStream]) + } + + import collection.mutable.{ ArrayBuffer, WrappedArray } + def abufO(n: Int) = { val ab = new ArrayBuffer[String]; arrayO(n).foreach(ab += _); ab } + def abufD(n: Int) = { val ab = new ArrayBuffer[Double]; arrayD(n).foreach(ab += _); ab } + def abufI(n: Int) = { val ab = new ArrayBuffer[Int]; arrayI(n).foreach(ab += _); ab } + def abufL(n: Int) = { val ab = new ArrayBuffer[Long]; arrayL(n).foreach(ab += _); ab } + def wrapO(n: Int): WrappedArray[String] = arrayO(n) + def wrapD(n: Int): WrappedArray[Double] = arrayD(n) + def wrapI(n: Int): WrappedArray[Int] = arrayI(n) + def wrapL(n: Int): WrappedArray[Long] = arrayL(n) + def vectO(n: Int) = arrayO(n).toVector + def vectD(n: Int) = arrayD(n).toVector + def vectI(n: Int) = arrayI(n).toVector + def vectL(n: Int) = arrayL(n).toVector + def genhset[A](aa: Array[A]) = { val hs = new collection.mutable.HashSet[A]; 
aa.foreach(hs += _); hs } + def hsetO(n: Int) = genhset(arrayO(n)) + def hsetD(n: Int) = genhset(arrayD(n)) + def hsetI(n: Int) = genhset(arrayI(n)) + def hsetL(n: Int) = genhset(arrayL(n)) + + @Test + def scalaToStream(): Unit = { + for (n <- ns) { + val arrO = arrayO(n) + val seqO = arrO.toSeq + val abO = abufO(n) + val wrO = wrapO(n) + val vecO = vectO(n) + val hsO = hsetO(n) + // Seems like a lot of boilerplate, but we need it to test implicit resolution + assertEq(seqO, seqO.seqStream.toScala[Seq]) + assertEq(seqO, seqO.stepper.parStream.toScala[Seq]) // Must go through stepper if we're unsure whether we can parallelize well + assertEq(seqO, arrO.seqStream.toScala[Seq]) + assertEq(seqO, arrO.parStream.toScala[Seq]) + assertEq(seqO, abO.seqStream.toScala[Seq]) + assertEq(seqO, abO.parStream.toScala[Seq]) + assertEq(seqO, wrO.seqStream.toScala[Seq]) + assertEq(seqO, wrO.parStream.toScala[Seq]) + assertEq(seqO, vecO.seqStream.toScala[Seq]) + assertEq(seqO, vecO.parStream.toScala[Seq]) + assertEq(seqO, hsO.seqStream.toScala[Seq].sortBy(_.toInt)) + assertEq(seqO, hsO.parStream.toScala[Seq].sortBy(_.toInt)) + + val arrD = arrayD(n) + val seqD = arrD.toSeq + val abD = abufD(n) + val wrD = wrapD(n) + val vecD = vectD(n) + val hsD = hsetD(n) + assertEq(seqD, seqD.seqStream.toScala[Seq]) + assertEq(seqD, seqD.stepper.parStream.toScala[Seq]) + assertEq(seqD, arrD.seqStream.toScala[Seq]) + assertEq(seqD, arrD.parStream.toScala[Seq]) + assert(arrD.seqStream.isInstanceOf[DoubleStream]) + assert(arrD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, abD.seqStream.toScala[Seq]) + assertEq(seqD, abD.parStream.toScala[Seq]) + assert(abD.seqStream.isInstanceOf[DoubleStream]) + assert(abD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, wrD.seqStream.toScala[Seq]) + assertEq(seqD, wrD.parStream.toScala[Seq]) + assert(wrD.seqStream.isInstanceOf[DoubleStream]) + assert(wrD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, vecD.seqStream.toScala[Seq]) + 
assertEq(seqD, vecD.parStream.toScala[Seq]) + assert(vecD.seqStream.isInstanceOf[DoubleStream]) + assert(vecD.parStream.isInstanceOf[DoubleStream]) + assertEq(seqD, hsD.seqStream.toScala[Seq].sorted) + assertEq(seqD, hsD.parStream.toScala[Seq].sorted) + assert(hsD.seqStream.isInstanceOf[DoubleStream]) + assert(hsD.parStream.isInstanceOf[DoubleStream]) + + val arrI = arrayI(n) + val seqI = arrI.toSeq + val abI = abufI(n) + val wrI = wrapI(n) + val vecI = vectI(n) + val hsI = hsetI(n) + assertEq(seqI, seqI.seqStream.toScala[Seq]) + assertEq(seqI, seqI.stepper.parStream.toScala[Seq]) + assertEq(seqI, arrI.seqStream.toScala[Seq]) + assertEq(seqI, arrI.parStream.toScala[Seq]) + assert(arrI.seqStream.isInstanceOf[IntStream]) + assert(arrI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, abI.seqStream.toScala[Seq]) + assertEq(seqI, abI.parStream.toScala[Seq]) + assert(abI.seqStream.isInstanceOf[IntStream]) + assert(abI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, wrI.seqStream.toScala[Seq]) + assertEq(seqI, wrI.parStream.toScala[Seq]) + assert(wrI.seqStream.isInstanceOf[IntStream]) + assert(wrI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, vecI.seqStream.toScala[Seq]) + assertEq(seqI, vecI.parStream.toScala[Seq]) + assert(vecI.seqStream.isInstanceOf[IntStream]) + assert(vecI.parStream.isInstanceOf[IntStream]) + assertEq(seqI, hsI.seqStream.toScala[Seq].sorted) + assertEq(seqI, hsI.parStream.toScala[Seq].sorted) + assert(hsI.seqStream.isInstanceOf[IntStream]) + assert(hsI.parStream.isInstanceOf[IntStream]) + + val arrL = arrayL(n) + val seqL = arrL.toSeq + val abL = abufL(n) + val wrL = wrapL(n) + val vecL = vectL(n) + val hsL = hsetL(n) + assertEq(seqL, seqL.seqStream.toScala[Seq]) + //assertEq(seqL, seqL.stepper.parStream.toScala[Seq]) + assertEq(seqL, arrL.seqStream.toScala[Seq]) + assertEq(seqL, arrL.parStream.toScala[Seq]) + assert(arrL.seqStream.isInstanceOf[LongStream]) + assert(arrL.parStream.isInstanceOf[LongStream]) + assertEq(seqL, 
abL.seqStream.toScala[Seq]) + assertEq(seqL, abL.parStream.toScala[Seq]) + assert(abL.seqStream.isInstanceOf[LongStream]) + assert(abL.parStream.isInstanceOf[LongStream]) + assertEq(seqD, wrD.seqStream.toScala[Seq]) + assertEq(seqD, wrD.parStream.toScala[Seq]) + assert(wrL.seqStream.isInstanceOf[LongStream]) + assert(wrL.parStream.isInstanceOf[LongStream]) + assertEq(seqD, wrD.seqStream.toScala[Seq]) + assertEq(seqD, wrD.parStream.toScala[Seq]) + assert(vecL.seqStream.isInstanceOf[LongStream]) + assert(vecL.parStream.isInstanceOf[LongStream]) + assertEq(seqL, hsL.seqStream.toScala[Seq].sorted) + assertEq(seqL, hsL.parStream.toScala[Seq].sorted) + assert(hsL.seqStream.isInstanceOf[LongStream]) + assert(hsL.parStream.isInstanceOf[LongStream]) + } + } + + @Test + def primitiveStreamTypes(): Unit = { + // Unboxed native + widening Steppers available: + assertEquals(Vector[Int](1, 2, 3), (Array[Int](1, 2, 3).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[Short](1.toShort, 2.toShort, 3.toShort), (Array[Short](1.toShort, 2.toShort, 3.toShort).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[String]("a", "b"), (Array[String]("a", "b").seqStream: Stream[String]).toScala[Vector]) + + // Boxed collections, widening via boxed AnySteppers: + assertEquals(Vector[Int](1, 2, 3), (Vector[Int](1, 2, 3).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[Short](1.toShort, 2.toShort, 3.toShort), (Vector[Short](1.toShort, 2.toShort, 3.toShort).seqStream: IntStream).toScala[Vector]) + assertEquals(Vector[String]("a", "b"), (Vector[String]("a", "b").seqStream: Stream[String]).toScala[Vector]) + } + + @Test + def streamMaterialization(): Unit = { + val coll = collection.mutable.WrappedArray.make[Int](Array(1,2,3)) + val streamize = implicitly[collection.mutable.WrappedArray[Int] => MakesSequentialStream[Int, IntStream]] + assertTrue(streamize(coll).getClass.getName.contains("EnrichIntWrappedArrayWithStream")) + val steppize = 
implicitly[collection.mutable.WrappedArray[Int] => MakesStepper[Int, Any]] + assertTrue(steppize(coll).getClass.getName.contains("RichArrayCanStep")) + val stepper = steppize(coll).stepper + assertTrue(stepper.getClass.getName.contains("StepsIntArray")) + + val ss = Vector(1,2,3).seqStream + val ss2: IntStream = ss + + val coll2 = Vector(1,2,3) + val streamize2 = implicitly[Vector[Int] => MakesSequentialStream[Int, IntStream]] + assertTrue(streamize2(coll2).getClass.getName.contains("EnrichAnySteppableWithSeqStream")) + val steppize2 = implicitly[Vector[Int] => MakesStepper[Int, Any]] + assertTrue(steppize2(coll2).getClass.getName.contains("RichVectorCanStep")) + val stepper2 = steppize2(coll2).stepper + assertTrue(stepper2.getClass.getName.contains("StepsIntVector")) + } + + @Test + def issue_87(): Unit = { + // Vectors that are generated from other vectors tend _not_ to + // have all their display vectors consistent; the cached vectors + // are correct, but the higher-level vector does _not_ contain + // the cached vector in the correct place (for efficiency)! This + // is called being "dirty", and needs to be handled specially. 
+ val dirtyDisplayVector = Vector.fill(120)("a").slice(0, 40) + val shouldNotNPE = + dirtyDisplayVector.seqStream.collect(Collectors.toList()) + assertEq(shouldNotNPE.toArray(new Array[String](0)).toVector, dirtyDisplayVector, "Vector[Any].seqStream (with dirty display)") + + val dirtyDisplayVectorInt = Vector.fill(120)(999).slice(0, 40) + val shouldNotNPEInt = + dirtyDisplayVectorInt.seqStream.sum() + assertEq(shouldNotNPEInt, dirtyDisplayVectorInt.sum, "Vector[Int].seqStream (with dirty display)") + + val dirtyDisplayVectorLong = Vector.fill(120)(99999999999L).slice(0, 40) + val shouldNotNPELong = + dirtyDisplayVectorLong.seqStream.sum() + assertEq(shouldNotNPELong, dirtyDisplayVectorLong.sum, "Vector[Long].seqStream (with dirty display)") + + val dirtyDisplayVectorDouble = Vector.fill(120)(0.1).slice(0, 40) + val shouldNotNPEDouble = + math.rint(dirtyDisplayVectorDouble.seqStream.sum() * 10) + assertEq(shouldNotNPEDouble, math.rint(dirtyDisplayVectorDouble.sum * 10), "Vector[Double].seqStream (with dirty display)") + } +} diff --git a/src/test/scala/scala/compat/java8/DurationConvertersTest.scala b/src/test/scala/scala/compat/java8/DurationConvertersTest.scala new file mode 100644 index 0000000..443d40c --- /dev/null +++ b/src/test/scala/scala/compat/java8/DurationConvertersTest.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import java.time.{Duration => JavaDuration} + +import org.junit.Assert._ +import org.junit.Test + +import scala.util.Try + +class DurationConvertersTest { + + import DurationConverters._ + import scala.concurrent.duration._ + + @Test + def scalaNanosToJavaDuration(): Unit = { + Seq[(Long, (Long, Int))]( + (Long.MinValue + 1) -> (-9223372037L, 145224193), // because java duration nanos are offset from the "wrong" direction + -1000000001L -> (-2, 999999999), + -1L -> (-1, 999999999), + 0L -> (0, 0), + 1L -> (0, 1), + 1000000001L -> (1,1), + Long.MaxValue -> (9223372036L, 854775807) + ).foreach { case (n, (expSecs, expNanos)) => + val result = n.nanos.toJava + assertEquals(s"toJava($n nanos) -> $expSecs s)", expSecs, result.getSeconds) + assertEquals(s"toJava($n nanos) -> $expNanos n)", expNanos, result.getNano) + } + } + + @Test + def scalaMilliSecondsToJavaDuration(): Unit = { + Seq[(Long, (Long, Int))]( + -9223372036854L -> (-9223372037L, 146000000), + -1L -> (-1L, 999000000), + 0L -> (0L, 0), + 1L -> (0L, 1000000), + 9223372036854L -> (9223372036L, 854000000) + ).foreach { case (n, (expSecs, expNanos)) => + val result = n.millis.toJava + assertEquals(s"toJava($n millis) -> $expSecs s)", expSecs, result.getSeconds) + assertEquals(s"toJava($n millis) -> $expNanos n)", expNanos, result.getNano) + } + } + + @Test + def scalaMicroSecondsToJavaDuration(): Unit = { + Seq[(Long, (Long, Int))]( + -9223372036854775L -> (-9223372037L, 145225000), + -1L -> (-1L, 999999000), + 0L -> (0L, 0), + 1L -> (0L, 1000), + 9223372036854775L -> (9223372036L, 854775000) + ).foreach { case (n, (expSecs, expNanos)) => + val result = n.micros.toJava + assertEquals(s"toJava($n micros) -> $expSecs s)", expSecs, result.getSeconds) + assertEquals(s"toJava($n micros) -> $expNanos n)", expNanos, result.getNano) + } + } + + @Test + def scalaSecondsToJavaDuration(): Unit = { + Seq[(Long, (Long, Int))]( + -9223372036L -> (-9223372036L, 0), + -1L -> (-1L, 0), + 0L 
-> (0L, 0), + 1L -> (1L, 0), + 9223372036L -> (9223372036L, 0) + ).foreach { case (n, (expSecs, expNanos)) => + val result = n.seconds.toJava + assertEquals(expSecs, result.getSeconds) + assertEquals(expNanos, result.getNano) + } + } + + + @Test + def javaSecondsToScalaDuration(): Unit = { + Seq[Long](-9223372036L, -1L, 0L, 1L, 9223372036L).foreach { n => + assertEquals(n, toScala(JavaDuration.ofSeconds(n)).toSeconds) + } + } + + + @Test + def javaNanosPartToScalaDuration(): Unit = { + val nanosPerSecond = 1000000000L + Seq[Long](-nanosPerSecond - 1L, 0L, 1L, nanosPerSecond - 1L).foreach { n => + assertEquals(n, toScala(JavaDuration.ofNanos(n)).toNanos) + } + } + + @Test + def unsupportedJavaDurationThrows(): Unit = { + Seq(JavaDuration.ofSeconds(-9223372037L), JavaDuration.ofSeconds(9223372037L)).foreach { d => + val res = Try { toScala(d) } + assertTrue(s"Expected exception for $d but got success", res.isFailure) + } + } + + +} diff --git a/src/test/scala/scala/compat/java8/FunctionConvertersTest.scala b/src/test/scala/scala/compat/java8/FunctionConvertersTest.scala new file mode 100644 index 0000000..b3da5ac --- /dev/null +++ b/src/test/scala/scala/compat/java8/FunctionConvertersTest.scala @@ -0,0 +1,846 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import org.junit.Test +import org.junit.Assert._ + +class FunctionConvertersTest { + import java.io.File + import java.util.function._ + import FunctionConverters._ + + val str = "fish" + val fyl = new File("salmon") + val num = 42 + val nmm = 9L + val nnn = 0.3 + + var cache: Any = null + def save(a: Any) = { cache = a; a } + def recall = { val ans = cache; cache = null; ans } + + case class Box[A](value: A) {} + + def sameS[A,B,C,D,E,F](f: (A, B) => C, g: (D, E) => F)(implicit ev1: A =:= D, ev2: B =:= E, ev3: C =:= F): Box[(A,B) => Boolean] = + Box((a: A, b: B) => f(a,b) == g(ev1(a),ev2(b))) + + def sameS[A,B,C,D](f: A => B, g: C => D)(implicit ev1: A =:= C, ev2: B =:= D): Box[A => Boolean] = + Box((a: A) => f(a) == g(ev1(a))) + + // BiConsumer tests; conceptually widens to BiFunction, narrows to ObjLongConsumer + @Test + def test_BiConsumer(): Unit = { + val bic1 = new BiConsumer[String, File]{ def accept(s: String, f: File): Unit = { save((s,f)) } } + val bic2 = new BiConsumer[Int, Long]{ def accept(i: Int, l: Long): Unit = { save((i,l)) } } + val sbic = (s: String, f: File) => { save((s,f)); () } + val zbic = (i: Int, l: Long) => { save((i,l)); () } + def jbic[A, B](bic: BiConsumer[A, B])(a: A, b: B) = { bic.accept(a,b); recall == ((a,b)) } + def fbic[A, B](f: (A,B) => Unit)(a: A, b: B) = { f(a,b); recall == ((a,b)) } + assert(jbic(asJavaBiConsumer(sbic))(str, fyl)) + assert(jbic(asJavaBiConsumer(zbic))(num, nmm)) + assert(jbic(sbic.asJava)(str, fyl)) + // assert(jbic(zbic.asJava)(num, nmm)) -- ObjLongConsumer + assert(fbic(asScalaFromBiConsumer(bic1))(str, fyl)) + assert(fbic(asScalaFromBiConsumer(bic2))(num, nmm)) + assert(fbic(bic1.asScala)(str, fyl)) + assert(fbic(bic2.asScala)(num, nmm)) + } + + + // BiFunction tests; conceptually narrows to any of the Bi functions or to ObjLongConsumer etc + @Test + def test_BiFunction(): Unit = { + val bif1 = new BiFunction[String, File, (String, File)]{ def apply(s: String, f: 
File) = (s,f) } + val bif2 = new BiFunction[Int, Long, Double]{ def apply(i: Int, l: Long) = i.toDouble*l } + val sbif = (s: String, f: File) => (s,f) + val zbif = (i: Int, l: Long) => i.toDouble*l + def sameJ[A,B,C,D,E,F](f: BiFunction[A, B, C], g: BiFunction[D, E, F])(implicit ev1: A =:= D, ev2: B =:= E, ev3: C =:= F) = + Box((a: A, b: B) => f.apply(a,b) == g.apply(ev1(a), ev2(b))) + assert(sameJ(bif1, sbif.asJava).value(str,fyl)) + assert(sameJ(bif1, asJavaBiFunction(sbif)).value(str,fyl)) + // assert(sameJ(bif2, zbif.asJava)) -- ToDoubleBiFunction + assert(sameJ(bif2, asJavaBiFunction(zbif)).value(num,nmm)) + assert(sameS(bif1.asScala, sbif).value(str,fyl)) + assert(sameS(asScalaFromBiFunction(bif1), sbif).value(str,fyl)) + assert(sameS(bif2.asScala, zbif).value(num,nmm)) + assert(sameS(asScalaFromBiFunction(bif2), zbif).value(num,nmm)) + } + + // BinaryOperator tests; actually widens to BiFunction and conceptually narrows to IntBinaryOperator etc. + @Test + def test_BinaryOperator(): Unit = { + val bop1 = new BinaryOperator[String]{ def apply(s: String, t: String) = s + t } + val bop2 = new BinaryOperator[Int]{ def apply(i: Int, j: Int) = i + j } + val sbop = (s: String, t: String) => s + t + val zbop = (i: Int, j: Int) => i + j + def sameJ[A,B](f: BinaryOperator[A], g: BinaryOperator[B])(implicit ev1: A =:= B) = + Box((a1: A, a2: A) => f.apply(a1, a2) == g.apply(ev1(a1), ev1(a2))) + assert(sameJ(bop1, sbop.asJava).value(str,str)) + assert(sameJ(bop1, asJavaBinaryOperator(sbop)).value(str,str)) + // assert(sameJ(bop2, zbop.asJava).value(num, num)) -- IntBinaryOperator + assert(sameJ(bop2, asJavaBinaryOperator(zbop)).value(num,num)) + assert(sameS(bop1.asScala, sbop).value(str,str)) + assert(sameS(asScalaFromBinaryOperator(bop1), sbop).value(str,str)) + assert(sameS(bop2.asScala, zbop).value(num,num)) + assert(sameS(asScalaFromBinaryOperator(bop2), zbop).value(num,num)) + } + + // BiPredicate tests; conceptually widens to BiFunction. 
Does not narrow (no IntBiPredicate or the like). + @Test + def test_BiPredicate(): Unit = { + val bip1 = new BiPredicate[String, File]{ def test(s: String, f: File) = s == f.getName } + val bip2 = new BiPredicate[Int, Long]{ def test(i: Int, l: Long) = i == l } + val sbip = (s: String, f: File) => s == f.getName + val zbip = (i: Int, l: Long) => i == l + def sameJ[A,B,C,D](f: BiPredicate[A,B], g: BiPredicate[C,D])(implicit ev1: A =:= C, ev2: B =:= D) = + Box((a: A, b: B) => f.test(a,b) == g.test(ev1(a), ev2(b))) + assert(sameJ(bip1, sbip.asJava).value(str,fyl)) + assert(sameJ(bip1, asJavaBiPredicate(sbip)).value(str,fyl)) + assert(sameJ(bip2, zbip.asJava).value(num,nmm)) + assert(sameJ(bip2, asJavaBiPredicate(zbip)).value(num, nmm)) + assert(sameS(bip1.asScala, sbip).value(str,fyl)) + assert(sameS(asScalaFromBiPredicate(bip1), sbip).value(str,fyl)) + assert(sameS(bip2.asScala, zbip).value(num, nmm)) + assert(sameS(asScalaFromBiPredicate(bip2), zbip).value(num,nmm)) + } + + // BooleanSupplier tests; conceptually widens to Supplier and Function. + @Test + def test_BooleanSupplier(): Unit = { + val bsup = new BooleanSupplier{ def getAsBoolean = true } + val sbup = () => true + def foo(bs: BooleanSupplier) = bs.getAsBoolean + def bar(f: () => Boolean) = foo(f.asJava) + def baz(bs: BooleanSupplier) = bar(bs.asScala) + assertEquals(foo(bsup), bar(sbup)) + assertEquals(foo(bsup), baz(bsup)) + assertEquals(foo(bsup), bar(asScalaFromBooleanSupplier(bsup))) + assertEquals(foo(bsup), baz(asJavaBooleanSupplier(sbup))) + } + + // Consumer tests; conceptually widens to Function and narrows to IntConsumer etc. 
+ @Test + def test_Consumer(): Unit = { + val con1 = new Consumer[String]{ def accept(s: String): Unit = { save(s) } } + val con2 = new Consumer[Int]{ def accept(i: Int): Unit = { save(i) } } + val scon = (s: String) => { save(s); () } + val zcon = (i: Int) => { save(i); () } + def jcon[A](c: Consumer[A])(a: A) = { c.accept(a); recall == a } + def fcon[A](f: A => Unit)(a: A) = { f(a); recall == a } + assert(jcon(scon.asJava)(str)) + assert(jcon(asJavaConsumer(scon))(str)) + // assert(jcon(zcon.asJava)) -- IntConsumer + assert(jcon(asJavaConsumer(zcon))(num)) + assert(fcon(con1.asScala)(str)) + assert(fcon(asScalaFromConsumer(con1))(str)) + assert(fcon(con2.asScala)(num)) + assert(fcon(asScalaFromConsumer(con2))(num)) + } + + // DoubleBinaryOperator tests; conceptually widens to BinaryOperator, ToDoubleBiFunction, and BiFunction + @Test + def test_DoubleBinaryOperator(): Unit = { + val dbop = new DoubleBinaryOperator{ def applyAsDouble(a: Double, b: Double) = a*b } + val sdbo = (a: Double, b: Double) => a*b + def foo(dbo: DoubleBinaryOperator)(a: Double, b: Double) = dbo.applyAsDouble(a,b) + def bar(f: (Double, Double) => Double)(a: Double, b: Double) = foo(f.asJava)(a,b) + def baz(dbo: DoubleBinaryOperator)(a: Double, b: Double) = bar(dbo.asScala)(a,b) + assertEquals(foo(dbop)(nnn, nnn), bar(sdbo)(nnn, nnn), 1e-9) + assertEquals(foo(dbop)(nnn, nnn), baz(dbop)(nnn, nnn), 1e-9) + assertEquals(foo(dbop)(nnn, nnn), bar(asScalaFromDoubleBinaryOperator(dbop))(nnn, nnn), 1e-9) + assertEquals(foo(dbop)(nnn, nnn), baz(asJavaDoubleBinaryOperator(sdbo))(nnn, nnn), 1e-9) + } + + // DoubleConsumer tests; conceptually widens to Consumer and Function + @Test + def test_DoubleConsumer(): Unit = { + val dcon = new DoubleConsumer{ def accept(value: Double): Unit = { save(value) } } + val sdco = (d: Double) => { save(d); () } + def jf(dc: DoubleConsumer)(d: Double) = { dc.accept(d); recall == d } + def sf(f: Double => Unit)(d: Double) = { f(d); recall == d } + 
assert(jf(sdco.asJava)(nnn)) + assert(jf(asJavaDoubleConsumer(sdco))(nnn)) + assert(sf(dcon.asScala)(nnn)) + assert(sf(asScalaFromDoubleConsumer(dcon))(nnn)) + } + + // DoubleFunction tests; conceptually widens to Function, narrows to DoubleUnaryOperator and DoubleToIntFunction etc. + @Test + def test_DoubleFunction(): Unit = { + val dfn1 = new DoubleFunction[String]{ def apply(value: Double) = f"$value%.3f" } + val dfn2 = new DoubleFunction[Int]{ def apply(value: Double) = math.ceil(value).toInt } + val sdfn = (d: Double) => f"$d%.3f" + val zdfn = (d: Double) => math.ceil(d).toInt + assertEquals(dfn1(nnn), sdfn(nnn)) + assertEquals(dfn1(nnn), dfn1.asScala(nnn)) + assertEquals(dfn1(nnn), asScalaFromDoubleFunction(dfn1)(nnn)) + assertEquals(dfn1(nnn), sdfn.asJava(nnn)) + assertEquals(dfn1(nnn), asJavaDoubleFunction(sdfn)(nnn)) + assertEquals(dfn2(nnn), zdfn(nnn)) + assertEquals(dfn2(nnn), dfn2.asScala(nnn)) + assertEquals(dfn2(nnn), asScalaFromDoubleFunction(dfn2)(nnn)) + /// assertEquals(dfn2(nnn), zdfn.asJava(nnn)) -- DoubleToIntFunction + assertEquals(dfn2(nnn), asJavaDoubleFunction(zdfn)(nnn)) + } + + // DoublePredicate tests; conceptually widens to DoubleFunction, Predicate, and Function + @Test + def test_DoublePredicate(): Unit = { + val dprd = new DoublePredicate{ def test(value: Double) = value > 0 } + val sdpr = (d: Double) => d > 0 + def foo(dp: DoublePredicate)(d: Double) = dp.test(d) + def bar(f: Double => Boolean)(d: Double) = foo(f.asJava)(d) + def baz(dp: DoublePredicate)(d: Double) = bar(dp.asScala)(d) + assertEquals(foo(dprd)(nnn), bar(sdpr)(nnn)) + assertEquals(foo(dprd)(nnn), baz(dprd)(nnn)) + assertEquals(foo(dprd)(nnn), bar(asScalaFromDoublePredicate(dprd))(nnn)) + assertEquals(foo(dprd)(nnn), baz(asJavaDoublePredicate(sdpr))(nnn)) + } + + // DoubleSupplier tests; conceptually widens to Supplier and Function + @Test + def test_DoubleSupplier(): Unit = { + val dsup = new DoubleSupplier{ def getAsDouble = 22.0/7 } + val sdsu = () => 22.0/7 + def 
foo(ds: DoubleSupplier) = ds.getAsDouble + def bar(f: () => Double) = foo(f.asJava) + def baz(ds: DoubleSupplier) = bar(ds.asScala) + assertEquals(foo(dsup), bar(sdsu), 1e-9) + assertEquals(foo(dsup), baz(dsup), 1e-9) + assertEquals(foo(dsup), bar(asScalaFromDoubleSupplier(dsup)), 1e-9) + assertEquals(foo(dsup), baz(asJavaDoubleSupplier(sdsu)), 1e-9) + } + + // DoubleToIntFunction tests; conceptually widens to DoubleFunction and Function + @Test + def test_DoubleToIntFunction(): Unit = { + val d2if = new DoubleToIntFunction{ def applyAsInt(value: Double) = math.ceil(value).toInt } + val sd2i = (d: Double) => math.ceil(d).toInt + def foo(di: DoubleToIntFunction)(d: Double) = di.applyAsInt(d) + def bar(f: Double => Int)(d: Double) = foo(f.asJava)(d) + def baz(di: DoubleToIntFunction)(d: Double) = bar(di.asScala)(d) + assertEquals(foo(d2if)(nnn), bar(sd2i)(nnn)) + assertEquals(foo(d2if)(nnn), baz(d2if)(nnn)) + assertEquals(foo(d2if)(nnn), bar(asScalaFromDoubleToIntFunction(d2if))(nnn)) + assertEquals(foo(d2if)(nnn), baz(asJavaDoubleToIntFunction(sd2i))(nnn)) + } + + // DoubleToLongFunction tests; conceptually widens to DoubleFunction and Function + @Test + def test_DoubleToLongFunction(): Unit = { + val d2lf = new DoubleToLongFunction{ def applyAsLong(value: Double) = java.lang.Double.doubleToRawLongBits(value) } + val sd2l = (d: Double) => java.lang.Double.doubleToRawLongBits(d) + def foo(dl: DoubleToLongFunction)(d: Double) = dl.applyAsLong(d) + def bar(f: Double => Long)(d: Double) = foo(f.asJava)(d) + def baz(dl: DoubleToLongFunction)(d: Double) = bar(dl.asScala)(d) + assertEquals(foo(d2lf)(nnn), bar(sd2l)(nnn)) + assertEquals(foo(d2lf)(nnn), baz(d2lf)(nnn)) + assertEquals(foo(d2lf)(nnn), bar(asScalaFromDoubleToLongFunction(d2lf))(nnn)) + assertEquals(foo(d2lf)(nnn), baz(asJavaDoubleToLongFunction(sd2l))(nnn)) + } + + // DoubleUnaryOperator tests; conceptually widens to DoubleFunction and ToDoubleFunction and Function + @Test + def test_DoubleUnaryOperator(): Unit 
= { + val duop = new DoubleUnaryOperator{ def applyAsDouble(value: Double) = 1.0 - value } + val sduo = (d: Double) => 1.0 - d + def foo(du: DoubleUnaryOperator)(d: Double) = du.applyAsDouble(d) + def bar(f: Double => Double)(d: Double) = foo(f.asJava)(d) + def baz(du: DoubleUnaryOperator)(d: Double) = bar(du.asScala)(d) + assertEquals(foo(duop)(nnn), bar(sduo)(nnn), 1e-9) + assertEquals(foo(duop)(nnn), baz(duop)(nnn), 1e-9) + assertEquals(foo(duop)(nnn), bar(asScalaFromDoubleUnaryOperator(duop))(nnn), 1e-9) + assertEquals(foo(duop)(nnn), baz(asJavaDoubleUnaryOperator(sduo))(nnn), 1e-9) + } + + // Function tests; conceptually narrows to everything except BiFunction and its conceptual subclasses + @Test + def test_Function(): Unit = { + val fun1 = new Function[String, File]{ def apply(s: String): File = new File(s) } + val fun2 = new Function[Int, Long]{ def apply(i: Int): Long = ((i.toLong)<<32) | i } + def sfun = (s: String) => new File(s) + def zfun = (i: Int) => (i.toLong << 32) | i + def jf1(f: Function[String, File])(s: String) = f.apply(s) + def jf2(f: Function[Int, Long])(i: Int) = f.apply(i) + def sf1(f: String => File)(s: String) = f(s) + def sf2(f: Int => Long)(i: Int) = f(i) + val ans = fun1(str) + assertEquals(ans, sfun(str)) + assertEquals(ans, jf1(fun1)(str)) + assertEquals(ans, sf1(sfun)(str)) + assertEquals(ans, jf1(sfun.asJava)(str)) + assertEquals(ans, sf1(fun1.asScala)(str)) + assertEquals(ans, jf1(asJavaFunction(sfun))(str)) + assertEquals(ans, sf1(asScalaFromFunction(fun1))(str)) + val anz = fun2(num) + assertEquals(anz, zfun(num)) + assertEquals(anz, jf2(fun2)(num)) + assertEquals(anz, sf2(zfun)(num)) + // assertEquals(anz, jf2(zfun.asJava)(num)) -- IntToLongFunction + assertEquals(anz, sf2(fun2.asScala)(num)) + assertEquals(anz, jf2(asJavaFunction(zfun))(num)) + assertEquals(anz, sf2(asScalaFromFunction(fun2))(num)) + } + + // IntBinaryOperator tests; conceptually widens to BinaryOperator, ToIntBiFunction, and BiFunction + @Test + def 
test_IntBinaryOperator(): Unit = { + val ibop = new IntBinaryOperator{ def applyAsInt(a: Int, b: Int) = a ^ b } + val sibo = (i: Int, j: Int) => i ^ j + def foo(ibo: IntBinaryOperator)(a: Int, b: Int) = ibo.applyAsInt(a,b) + def bar(f: (Int, Int) => Int)(a: Int, b: Int) = foo(f.asJava)(a,b) + def baz(ibo: IntBinaryOperator)(a: Int, b: Int) = bar(ibo.asScala)(a,b) + assertEquals(foo(ibop)(num, num), bar(sibo)(num, num)) + assertEquals(foo(ibop)(num, num), baz(ibop)(num, num)) + assertEquals(foo(ibop)(num, num), bar(asScalaFromIntBinaryOperator(ibop))(num, num)) + assertEquals(foo(ibop)(num, num), baz(asJavaIntBinaryOperator(sibo))(num, num)) + } + + // IntConsumer tests; conceptually widens to Consumer and Function + @Test + def test_IntConsumer(): Unit = { + val icon = new IntConsumer{ def accept(i: Int): Unit = { save(i) } } + val sico = (i: Int) => { save(i); () } + def jf(ic: IntConsumer)(d: Int) = { ic.accept(d); recall == d } + def sf(f: Int => Unit)(d: Int) = { f(d); recall == d } + assert(jf(sico.asJava)(num)) + assert(jf(asJavaIntConsumer(sico))(num)) + assert(sf(icon.asScala)(num)) + assert(sf(asScalaFromIntConsumer(icon))(num)) + } + + // IntFunction tests; conceptually widens to Function + @Test + def test_IntFunction(): Unit = { + val ifn1 = new IntFunction[String]{ def apply(i: Int) = "!"*i } + val ifn2 = new IntFunction[Long]{ def apply(i: Int) = ((i.toLong) << 32) | i } + val sifn = (i: Int) => "!"*i + val zifn = (i: Int) => (i.toLong << 32) | i + assertEquals(ifn1(num), sifn(num)) + assertEquals(ifn1(num), ifn1.asScala(num)) + assertEquals(ifn1(num), asScalaFromIntFunction(ifn1)(num)) + assertEquals(ifn1(num), sifn.asJava(num)) + assertEquals(ifn1(num), asJavaIntFunction(sifn)(num)) + assertEquals(ifn2(num), zifn(num)) + assertEquals(ifn2(num), ifn2.asScala(num)) + assertEquals(ifn2(num), asScalaFromIntFunction(ifn2)(num)) + /// assertEquals(ifn2(num), zifn.asJava(num)) -- IntToLongFunction + assertEquals(ifn2(num), asJavaIntFunction(zifn)(num)) + } 
+ + // IntPredicate tests; conceptually widens to IntFunction, Predicate, and Function + @Test + def test_IntPredicate(): Unit = { + val iprd = new IntPredicate{ def test(i: Int) = i < 0 } + val sipr = (i: Int) => i < 0 + def foo(ip: IntPredicate)(d: Int) = ip.test(d) + def bar(f: Int => Boolean)(d: Int) = foo(f.asJava)(d) + def baz(ip: IntPredicate)(d: Int) = bar(ip.asScala)(d) + assertEquals(foo(iprd)(num), bar(sipr)(num)) + assertEquals(foo(iprd)(num), baz(iprd)(num)) + assertEquals(foo(iprd)(num), bar(asScalaFromIntPredicate(iprd))(num)) + assertEquals(foo(iprd)(num), baz(asJavaIntPredicate(sipr))(num)) + } + + // IntSupplier tests; conceptually widens to Supplier and Function + @Test + def test_IntSupplier(): Unit = { + val isup = new IntSupplier{ def getAsInt = 42 } + val sisu = () => 42 + def foo(ds: IntSupplier) = ds.getAsInt + def bar(f: () => Int) = foo(f.asJava) + def baz(ds: IntSupplier) = bar(ds.asScala) + assertEquals(foo(isup), bar(sisu)) + assertEquals(foo(isup), baz(isup)) + assertEquals(foo(isup), bar(asScalaFromIntSupplier(isup))) + assertEquals(foo(isup), baz(asJavaIntSupplier(sisu))) + } + + // IntToDoubleFunction tests; conceptually widens to ToDoubleFunction, IntFunction, and Function + @Test + def test_IntToDoubleFunction(): Unit = { + val i2df = new IntToDoubleFunction{ def applyAsDouble(i: Int) = i + 0.1*i } + def si2d = (i: Int) => i + 0.1*i + def foo(id: IntToDoubleFunction)(i: Int) = id.applyAsDouble(i) + def bar(f: Int => Double)(i: Int) = foo(f.asJava)(i) + def baz(id: IntToDoubleFunction)(i: Int) = bar(id.asScala)(i) + assertEquals(foo(i2df)(num), bar(si2d)(num), 1e-9) + assertEquals(foo(i2df)(num), baz(i2df)(num), 1e-9) + assertEquals(foo(i2df)(num), bar(asScalaFromIntToDoubleFunction(i2df))(num), 1e-9) + assertEquals(foo(i2df)(num), baz(asJavaIntToDoubleFunction(si2d))(num), 1e-9) + } + + // IntToLongFunction tests; conceptually widens to ToLongFunction, IntFunction, and Function + @Test + def test_IntToLongFunction(): Unit = { + 
val i2lf = new IntToLongFunction { def applyAsLong(i: Int) = (i.toLong << 32) | i } + val si2l = (i: Int) => (i.toLong << 32) | i + def foo(il: IntToLongFunction)(d: Int) = il.applyAsLong(d) + def bar(f: Int => Long)(d: Int) = foo(f.asJava)(d) + def baz(il: IntToLongFunction)(d: Int) = bar(il.asScala)(d) + assertEquals(foo(i2lf)(num), bar(si2l)(num)) + assertEquals(foo(i2lf)(num), baz(i2lf)(num)) + assertEquals(foo(i2lf)(num), bar(asScalaFromIntToLongFunction(i2lf))(num)) + assertEquals(foo(i2lf)(num), baz(asJavaIntToLongFunction(si2l))(num)) + } + + // IntUnaryOperator tests; conceptually widens to ToIntFunction, IntFunction, and Function + @Test + def test_IntUnaryOperator(): Unit = { + val iuop = new IntUnaryOperator{ def applyAsInt(i: Int) = ~i } + val siuo = (i: Int) => ~i + def foo(iu: IntUnaryOperator)(d: Int) = iu.applyAsInt(d) + def bar(f: Int => Int)(d: Int) = foo(f.asJava)(d) + def baz(iu: IntUnaryOperator)(d: Int) = bar(iu.asScala)(d) + assertEquals(foo(iuop)(num), bar(siuo)(num)) + assertEquals(foo(iuop)(num), baz(iuop)(num)) + assertEquals(foo(iuop)(num), bar(asScalaFromIntUnaryOperator(iuop))(num)) + assertEquals(foo(iuop)(num), baz(asJavaIntUnaryOperator(siuo))(num)) + } + + // LongBinaryOperator tests; conceptually widens to ToLongFunction, LongFunction, and Function + @Test + def test_LongBinaryOperator(): Unit = { + val lbop = new LongBinaryOperator{ def applyAsLong(a: Long, b: Long) = a | b } + val slbo = (a: Long, b: Long) => a | b + def foo(lbo: LongBinaryOperator)(a: Long, b: Long) = lbo.applyAsLong(a,b) + def bar(f: (Long, Long) => Long)(a: Long, b: Long) = foo(f.asJava)(a,b) + def baz(lbo: LongBinaryOperator)(a: Long, b: Long) = bar(lbo.asScala)(a,b) + assertEquals(foo(lbop)(nmm, nmm), bar(slbo)(nmm, nmm)) + assertEquals(foo(lbop)(nmm, nmm), baz(lbop)(nmm, nmm)) + assertEquals(foo(lbop)(nmm, nmm), bar(asScalaFromLongBinaryOperator(lbop))(nmm, nmm)) + assertEquals(foo(lbop)(nmm, nmm), baz(asJavaLongBinaryOperator(slbo))(nmm, nmm)) + } + + // 
LongConsumer tests; conceptually widens to Consumer and Function + @Test + def test_LongConsumer(): Unit = { + val lcon = new LongConsumer{ def accept(l: Long): Unit = { save(l) } } + val slco = (l: Long) => { save(l); () } + def jf(lc: LongConsumer)(d: Long) = { lc.accept(d); recall == d } + def sf(f: Long => Unit)(d: Long) = { f(d); recall == d } + assert(jf(slco.asJava)(nmm)) + assert(jf(asJavaLongConsumer(slco))(nmm)) + assert(sf(lcon.asScala)(nmm)) + assert(sf(asScalaFromLongConsumer(lcon))(nmm)) + } + + // LongFunction tests; conceptually widens to Function + @Test + def test_LongFunction(): Unit = { + val lfn1 = new LongFunction[String]{ def apply(l: Long) = l.toString } + val lfn2 = new LongFunction[Int]{ def apply(l: Long) = (l & 0xFFFFFF).toInt } + val slfn = (l: Long) => l.toString + val zlfn = (l: Long) => (l & 0xFFFFFF).toInt + assertEquals(lfn1(nmm), slfn(nmm)) + assertEquals(lfn1(nmm), lfn1.asScala(nmm)) + assertEquals(lfn1(nmm), asScalaFromLongFunction(lfn1)(nmm)) + assertEquals(lfn1(nmm), slfn.asJava(nmm)) + assertEquals(lfn1(nmm), asJavaLongFunction(slfn)(nmm)) + assertEquals(lfn2(nmm), zlfn(nmm)) + assertEquals(lfn2(nmm), lfn2.asScala(nmm)) + assertEquals(lfn2(nmm), asScalaFromLongFunction(lfn2)(nmm)) + /// assertEquals(lfn2(nmm), zlfn.asJava(nmm)) -- LongToIntFunction + assertEquals(lfn2(nmm), asJavaLongFunction(zlfn)(nmm)) + } + + // LongPredicate tests; conceptually widens to LongFunction and Predicate and Function + @Test + def test_LongPredicate(): Unit = { + val lprd = new LongPredicate{ def test(l: Long) = l < 1 } + val slpr = (l: Long) => l < 1 + def foo(lp: LongPredicate)(d: Long) = lp.test(d) + def bar(f: Long => Boolean)(d: Long) = foo(f.asJava)(d) + def baz(lp: LongPredicate)(d: Long) = bar(lp.asScala)(d) + assertEquals(foo(lprd)(nmm), bar(slpr)(nmm)) + assertEquals(foo(lprd)(nmm), baz(lprd)(nmm)) + assertEquals(foo(lprd)(nmm), bar(asScalaFromLongPredicate(lprd))(nmm)) + assertEquals(foo(lprd)(nmm), 
baz(asJavaLongPredicate(slpr))(nmm)) + } + + // LongSupplier tests; conceptually widens to ToLongFunction and Supplier and Function + @Test + def test_LongSupplier(): Unit = { + val lsup = new LongSupplier{ def getAsLong = 1000000000000L } + val slsu = () => 1000000000000L + def foo(ls: LongSupplier) = ls.getAsLong + def bar(f: () => Long) = foo(f.asJava) + def baz(ls: LongSupplier) = bar(ls.asScala) + assertEquals(foo(lsup), bar(slsu)) + assertEquals(foo(lsup), baz(lsup)) + assertEquals(foo(lsup), bar(asScalaFromLongSupplier(lsup))) + assertEquals(foo(lsup), baz(asJavaLongSupplier(slsu))) + } + + // LongToDoubleFunction tests; conceptually widens to ToDoubleFunction, LongFunction, and Function + @Test + def test_LongToDoubleFunction(): Unit = { + val l2df = new LongToDoubleFunction{ def applyAsDouble(l: Long) = l + 1e-4*l } + def sl2d = (l: Long) => l + 1e-4*l + def foo(ld: LongToDoubleFunction)(l: Long) = ld.applyAsDouble(l) + def bar(f: Long => Double)(l: Long) = foo(f.asJava)(l) + def baz(ld: LongToDoubleFunction)(l: Long) = bar(ld.asScala)(l) + assertEquals(foo(l2df)(num), bar(sl2d)(num), 1e-9) + assertEquals(foo(l2df)(num), baz(l2df)(num), 1e-9) + assertEquals(foo(l2df)(num), bar(asScalaFromLongToDoubleFunction(l2df))(num), 1e-9) + assertEquals(foo(l2df)(num), baz(asJavaLongToDoubleFunction(sl2d))(num), 1e-9) + } + + // LongToIntFunction tests; conceptually widens to ToIntFunction, LongFunction, and Function + @Test + def test_LongToIntFunction(): Unit = { + val l2if = new LongToIntFunction{ def applyAsInt(l :Long) = (l & 0xFFFFFF).toInt } + val sl2i = (l: Long) => (l & 0xFFFFFF).toInt + def foo(li: LongToIntFunction)(l: Long) = li.applyAsInt(l) + def bar(f: Long => Int)(l: Long) = foo(f.asJava)(l) + def baz(li: LongToIntFunction)(l: Long) = bar(li.asScala)(l) + assertEquals(foo(l2if)(nmm), bar(sl2i)(nmm)) + assertEquals(foo(l2if)(nmm), baz(l2if)(nmm)) + assertEquals(foo(l2if)(nmm), bar(asScalaFromLongToIntFunction(l2if))(nmm)) + assertEquals(foo(l2if)(nmm), 
baz(asJavaLongToIntFunction(sl2i))(nmm)) + } + + // LongUnaryOperator tests; conceptually widens to LongFunction, ToLongFunction, and Function + @Test + def test_LongUnaryOperator(): Unit = { + val luop = new LongUnaryOperator{ def applyAsLong(l: Long) = -l } + val sluo = (l: Long) => -l + def foo(du: LongUnaryOperator)(l: Long) = du.applyAsLong(l) + def bar(f: Long => Long)(l: Long) = foo(f.asJava)(l) + def baz(du: LongUnaryOperator)(l: Long) = bar(du.asScala)(l) + assertEquals(foo(luop)(nmm), bar(sluo)(nmm)) + assertEquals(foo(luop)(nmm), baz(luop)(nmm)) + assertEquals(foo(luop)(nmm), bar(asScalaFromLongUnaryOperator(luop))(nmm)) + assertEquals(foo(luop)(nmm), baz(asJavaLongUnaryOperator(sluo))(nmm)) + } + + // ObjDoubleConsumer tests; conceptually widens to Consumer and BiFunction + @Test + def test_ObjDoubleConsumer(): Unit = { + val odc1 = new ObjDoubleConsumer[String]{ def accept(s: String, d: Double): Unit = { save((s,d)) } } + val odc2 = new ObjDoubleConsumer[Int]{ def accept(i: Int, d: Double): Unit = { save((i,d)) } } + val sodc = (s: String, d: Double) => { save((s,d)); () } + val zodc = (i: Int, d: Double) => { save((i,d)); () } + def jf1(odc: ObjDoubleConsumer[String])(s: String, d: Double) = { odc.accept(s,d); recall == ((s,d)) } + def jf2(odc: ObjDoubleConsumer[Int])(i: Int, d: Double) = { odc.accept(i,d); recall == ((i,d)) } + def sf1(f: (String, Double) => Unit)(s: String, d: Double) = { f(s,d); recall == ((s,d)) } + def sf2(f: (Int, Double) => Unit)(i: Int, d: Double) = { f(i,d); recall == ((i,d)) } + assert(jf1(odc1)(str, nnn)) + assert(jf1(sodc.asJava)(str, nnn)) + assert(jf1(asJavaObjDoubleConsumer(sodc))(str, nnn)) + assert(sf1(sodc)(str, nnn)) + assert(sf1(odc1.asScala)(str, nnn)) + assert(sf1(asScalaFromObjDoubleConsumer(odc1))(str, nnn)) + assert(jf2(odc2)(num, nnn)) + assert(jf2(zodc.asJava)(num, nnn)) + assert(jf2(asJavaObjDoubleConsumer(zodc))(num, nnn)) + assert(sf2(zodc)(num, nnn)) + assert(sf2(odc2.asScala)(num, nnn)) + 
assert(sf2(asScalaFromObjDoubleConsumer(odc2))(num, nnn)) + } + + // ObjIntConsumer tests; conceptually widens to Consumer and BiFunction + @Test + def test_ObjIntConsumer(): Unit = { + val oic1 = new ObjIntConsumer[String]{ def accept(s: String, i: Int): Unit = { save((s,i)) } } + val oic2 = new ObjIntConsumer[Int]{ def accept(j: Int, i: Int): Unit = { save((j,i)) } } + val soic = (s: String, i: Int) => { save((s,i)); () } + val zoic = (j: Int, i: Int) => { save((j,i)); () } + def jf1(oic: ObjIntConsumer[String])(s: String, i: Int) = { oic.accept(s,i); recall == ((s,i)) } + def jf2(oic: ObjIntConsumer[Int])(j: Int, i: Int) = { oic.accept(j,i); recall == ((j,i)) } + def sf1(f: (String, Int) => Unit)(s: String, i: Int) = { f(s,i); recall == ((s,i)) } + def sf2(f: (Int, Int) => Unit)(j: Int, i: Int) = { f(j,i); recall == ((j,i)) } + assert(jf1(oic1)(str, num)) + assert(jf1(soic.asJava)(str, num)) + assert(jf1(asJavaObjIntConsumer(soic))(str, num)) + assert(sf1(soic)(str, num)) + assert(sf1(oic1.asScala)(str, num)) + assert(sf1(asScalaFromObjIntConsumer(oic1))(str, num)) + assert(jf2(oic2)(num, num)) + assert(jf2(zoic.asJava)(num, num)) + assert(jf2(asJavaObjIntConsumer(zoic))(num, num)) + assert(sf2(zoic)(num, num)) + assert(sf2(oic2.asScala)(num, num)) + assert(sf2(asScalaFromObjIntConsumer(oic2))(num, num)) + } + + // ObjLongConsumer tests; conceptually widens to Consumer and BiFunction + @Test + def test_ObjLongConsumer(): Unit = { + val olc1 = new ObjLongConsumer[String]{ def accept(s: String, l: Long): Unit = { save((s,l)) } } + val olc2 = new ObjLongConsumer[Int]{ def accept(i: Int, l: Long): Unit = { save((i,l)) } } + val solc = (s: String, l: Long) => { save((s,l)); () } + val zolc = (i: Int, l: Long) => { save((i,l)); () } + def jf1(olc: ObjLongConsumer[String])(s: String, l: Long) = { olc.accept(s,l); recall == ((s,l)) } + def jf2(olc: ObjLongConsumer[Int])(i: Int, l: Long) = { olc.accept(i,l); recall == ((i,l)) } + def sf1(f: (String, Long) => Unit)(s: 
String, l: Long) = { f(s,l); recall == ((s,l)) } + def sf2(f: (Int, Long) => Unit)(i: Int, l: Long) = { f(i,l); recall == ((i,l)) } + assert(jf1(olc1)(str, nmm)) + assert(jf1(solc.asJava)(str, nmm)) + assert(jf1(asJavaObjLongConsumer(solc))(str, nmm)) + assert(sf1(solc)(str, nmm)) + assert(sf1(olc1.asScala)(str, nmm)) + assert(sf1(asScalaFromObjLongConsumer(olc1))(str, nmm)) + assert(jf2(olc2)(num, nmm)) + assert(jf2(zolc.asJava)(num, nmm)) + assert(jf2(asJavaObjLongConsumer(zolc))(num, nmm)) + assert(sf2(zolc)(num, nmm)) + assert(sf2(olc2.asScala)(num, nmm)) + assert(sf2(asScalaFromObjLongConsumer(olc2))(num, nmm)) + } + + // Predicate tests; conceptually widens to Function and narrows to IntPredicate etc. + @Test + def test_Predicate(): Unit = { + val prd1 = new Predicate[String]{ def test(s: String) = s.isEmpty } + val prd2 = new Predicate[Int]{ def test(i: Int) = i < 0 } + def sprd = (s: String) => s.isEmpty + def zprd = (i: Int) => i < 0 + def foos(p: Predicate[String])(s: String) = p.test(s) + def bars(f: String => Boolean)(s: String) = foos(f.asJava)(s) + def bazs(p: Predicate[String])(s: String) = bars(p.asScala)(s) + def fooi(p: Predicate[Int])(i: Int) = p.test(i) + def bari(f: Int => Boolean)(i: Int) = fooi(asJavaPredicate(f))(i) // .asScala gives IntPredicate + def bazi(p: Predicate[Int])(i: Int) = bari(p.asScala)(i) + assertEquals(foos(prd1)(str), bars(sprd)(str)) + assertEquals(foos(prd1)(str), bazs(prd1)(str)) + assertEquals(foos(prd1)(str), bars(asScalaFromPredicate(prd1))(str)) + assertEquals(foos(prd1)(str), bazs(asJavaPredicate(sprd))(str)) + assertEquals(fooi(prd2)(num), bari(zprd)(num)) + assertEquals(fooi(prd2)(num), bazi(prd2)(num)) + assertEquals(fooi(prd2)(num), bari(asScalaFromPredicate(prd2))(num)) + } + + // Supplier tests; conceptually widens to Function and narrows to IntSupplier etc. 
+ @Test + def test_Supplier(): Unit = { + val sup1 = new Supplier[String]{ def get = "halibut" } + val sup2 = new Supplier[Int]{ def get = 17 } + val ssup = () => "halibut" + val zsup = () => 17 + def foos(s: Supplier[String]) = s.get + def bars(f: () => String) = foos(f.asJava) + def bazs(s: Supplier[String]) = bars(s.asScala) + def fooi(s: Supplier[Int]) = s.get + def bari(f: () => Int) = fooi(asJavaSupplier(f)) // .asScala gives IntSupplier + def bazi(s: Supplier[Int]) = bari(s.asScala) + val ans = foos(sup1) + assertEquals(ans, bars(ssup)) + assertEquals(ans, bazs(sup1)) + assertEquals(ans, bars(asScalaFromSupplier(sup1))) + assertEquals(ans, bazs(asJavaSupplier(ssup))) + val anz = fooi(sup2) + assertEquals(anz, bari(zsup)) + assertEquals(anz, bazi(sup2)) + assertEquals(anz, bari(asScalaFromSupplier(sup2))) + } + + // ToDoubleBiFunction tests; conceptually widens to BiFunction and narrows to DoubleBinaryOperator + @Test + def test_ToDoubleBiFunction(): Unit = { + { + val bfd1 = new ToDoubleBiFunction[String, File]{ def applyAsDouble(s: String, f: File) = s.length.toDouble * f.getName.length } + val sbfd = (s: String, f: File) => s.length.toDouble * f.getName.length + def jf1(tdbf: ToDoubleBiFunction[String, File])(s: String, f: File) = tdbf.applyAsDouble(s,f) + def sf1(f: (String, File) => Double)(s: String, fi: File) = f(s,fi) + val ans = jf1(bfd1)(str, fyl) + assertEquals(ans, sf1(sbfd)(str, fyl), 1e-9) + assertEquals(ans, jf1(sbfd.asJava)(str, fyl), 1e-9) + assertEquals(ans, sf1(bfd1.asScala)(str, fyl), 1e-9) + assertEquals(ans, jf1(asJavaToDoubleBiFunction(sbfd))(str, fyl), 1e-9) + assertEquals(ans, sf1(asScalaFromToDoubleBiFunction(bfd1))(str, fyl), 1e-9) + } + { + val bfd2 = new ToDoubleBiFunction[Double, File]{ def applyAsDouble(a: Double, f: File) = a * f.getName.length } + val zbfd = (a: Double, f: File) => a * f.getName.length + def jf2(tdbf: ToDoubleBiFunction[Double, File])(a: Double, f: File) = tdbf.applyAsDouble(a,f) + def sf2(f: (Double, File) => 
Double)(a: Double, fi: File) = f(a,fi) + val ans = jf2(bfd2)(nnn, fyl) + assertEquals(ans, sf2(zbfd)(nnn, fyl), 1e-9) + assertEquals(ans, jf2(zbfd.asJava)(nnn, fyl), 1e-9) + assertEquals(ans, sf2(bfd2.asScala)(nnn, fyl), 1e-9) + assertEquals(ans, jf2(asJavaToDoubleBiFunction(zbfd))(nnn, fyl), 1e-9) + assertEquals(ans, sf2(asScalaFromToDoubleBiFunction(bfd2))(nnn, fyl), 1e-9) + } + } + + + // ToDoubleFunction tests; conceptually widens to Function and narrows to DoubleUnaryOperator, IntToDoubleFunction, etc. + @Test + def test_ToDoubleFunction(): Unit = { + { + val fnd1 = new ToDoubleFunction[String]{ def applyAsDouble(s: String) = s.length / (s.headOption.getOrElse(0: Char)+1).toDouble } + val sfnd = (s: String) => s.length / (s.headOption.getOrElse(0: Char)+1).toDouble + def jf1(tdf: ToDoubleFunction[String])(s: String) = tdf.applyAsDouble(s) + def sf1(f: String => Double)(s: String) = f(s) + val ans = jf1(fnd1)(str) + assertEquals(ans, sf1(sfnd)(str), 1e-9) + assertEquals(ans, jf1(sfnd.asJava)(str), 1e-9) + assertEquals(ans, sf1(fnd1.asScala)(str), 1e-9) + assertEquals(ans, jf1(asJavaToDoubleFunction(sfnd))(str), 1e-9) + assertEquals(ans, sf1(asScalaFromToDoubleFunction(fnd1))(str), 1e-9) + } + { + val fnd2 = new ToDoubleFunction[Double]{ def applyAsDouble(d: Double) = 1.0 - d } + val zfnd = (d: Double) => 1.0 - d + def jf2(tdf: ToDoubleFunction[Double])(x: Double) = tdf.applyAsDouble(x) + def sf2(f: Double => Double)(x: Double) = f(x) + val ans = jf2(fnd2)(nnn) + assertEquals(ans, sf2(zfnd)(nnn), 1e-9) + // assertEquals(ans, jf2(znfd.asJava)(nnn), 1e-9) -- DoubleUnaryOperator + assertEquals(ans, sf2(asScalaFromToDoubleFunction(fnd2))(nnn), 1e-9) + assertEquals(ans, jf2(asJavaToDoubleFunction(zfnd))(nnn), 1e-9) + } + } + + // ToIntBiFunction tests; conceptually widens to BiFunction and narrows to IntBinaryOperator + @Test + def test_ToIntBiFunction(): Unit = { + { + val bfi1 = new ToIntBiFunction[String, File]{ def applyAsInt(s: String, f: File) = s.length + 
f.getName.length } + val sbfi = (s: String, f: File) => s.length.toInt + f.getName.length + def jf1(tdbf: ToIntBiFunction[String, File])(s: String, f: File) = tdbf.applyAsInt(s,f) + def sf1(f: (String, File) => Int)(s: String, fi: File) = f(s,fi) + val ans = jf1(bfi1)(str, fyl) + assertEquals(ans, sf1(sbfi)(str, fyl)) + assertEquals(ans, jf1(sbfi.asJava)(str, fyl)) + assertEquals(ans, sf1(bfi1.asScala)(str, fyl)) + assertEquals(ans, jf1(asJavaToIntBiFunction(sbfi))(str, fyl)) + assertEquals(ans, sf1(asScalaFromToIntBiFunction(bfi1))(str, fyl)) + } + { + val bfi2 = new ToIntBiFunction[Int, File]{ def applyAsInt(i: Int, f: File) = i * f.getName.length } + val zbfi = (a: Int, f: File) => a * f.getName.length + def jf2(tdbf: ToIntBiFunction[Int, File])(a: Int, f: File) = tdbf.applyAsInt(a,f) + def sf2(f: (Int, File) => Int)(a: Int, fi: File) = f(a,fi) + val ans = jf2(bfi2)(num, fyl) + assertEquals(ans, sf2(zbfi)(num, fyl)) + assertEquals(ans, jf2(zbfi.asJava)(num, fyl)) + assertEquals(ans, sf2(bfi2.asScala)(num, fyl)) + assertEquals(ans, jf2(asJavaToIntBiFunction(zbfi))(num, fyl)) + assertEquals(ans, sf2(asScalaFromToIntBiFunction(bfi2))(num, fyl)) + } + } + + // ToIntFunction tests; conceptually widens to Function and narrows to IntUnaryOperator, etc.. 
+ @Test + def test_ToIntFunction(): Unit = { + { + val fni1 = new ToIntFunction[String]{ def applyAsInt(s: String) = s.length } + val sfni = (s: String) => s.length + def jf1(tdf: ToIntFunction[String])(s: String) = tdf.applyAsInt(s) + def sf1(f: String => Int)(s: String) = f(s) + val ans = jf1(fni1)(str) + assertEquals(ans, sf1(sfni)(str)) + assertEquals(ans, jf1(sfni.asJava)(str)) + assertEquals(ans, sf1(fni1.asScala)(str)) + assertEquals(ans, jf1(asJavaToIntFunction(sfni))(str)) + assertEquals(ans, sf1(asScalaFromToIntFunction(fni1))(str)) + } + { + val fni2 = new ToIntFunction[Int]{ def applyAsInt(i: Int) = -i } + val zfni = (x: Int) => -x + def jf2(tdf: ToIntFunction[Int])(x: Int) = tdf.applyAsInt(x) + def sf2(f: Int => Int)(x: Int) = f(x) + val ans = jf2(fni2)(num) + assertEquals(ans, sf2(zfni)(num)) + // assertEquals(ans, jf2(znfd.asJava)(num)) -- IntUnaryOperator + assertEquals(ans, sf2(asScalaFromToIntFunction(fni2))(num)) + assertEquals(ans, jf2(asJavaToIntFunction(zfni))(num)) + } + } + + // ToLongBiFunction tests; conceptually widens to BiFunction and narrows to LongBinaryOperator + @Test + def test_ToLongBiFunction(): Unit = { + { + val bfl1 = new ToLongBiFunction[String, File]{ def applyAsLong(s: String, f: File) = s.length * f.getName.length } + val sbfl = (s: String, f: File) => s.length.toLong * f.getName.length + def jf1(tdbf: ToLongBiFunction[String, File])(s: String, f: File) = tdbf.applyAsLong(s,f) + def sf1(f: (String, File) => Long)(s: String, fi: File) = f(s,fi) + val ans = jf1(bfl1)(str, fyl) + assertEquals(ans, sf1(sbfl)(str, fyl)) + assertEquals(ans, jf1(sbfl.asJava)(str, fyl)) + assertEquals(ans, sf1(bfl1.asScala)(str, fyl)) + assertEquals(ans, jf1(asJavaToLongBiFunction(sbfl))(str, fyl)) + assertEquals(ans, sf1(asScalaFromToLongBiFunction(bfl1))(str, fyl)) + } + { + val bfl2 = new ToLongBiFunction[Long, File]{ def applyAsLong(l: Long, f: File) = l ^ f.getName.length } + val zbfl = (a: Long, f: File) => a ^ f.getName.length + def 
jf2(tdbf: ToLongBiFunction[Long, File])(a: Long, f: File) = tdbf.applyAsLong(a,f) + def sf2(f: (Long, File) => Long)(a: Long, fi: File) = f(a,fi) + val ans = jf2(bfl2)(nmm, fyl) + assertEquals(ans, sf2(zbfl)(nmm, fyl)) + assertEquals(ans, jf2(zbfl.asJava)(nmm, fyl)) + assertEquals(ans, sf2(bfl2.asScala)(nmm, fyl)) + assertEquals(ans, jf2(asJavaToLongBiFunction(zbfl))(nmm, fyl)) + assertEquals(ans, sf2(asScalaFromToLongBiFunction(bfl2))(nmm, fyl)) + } + } + + // ToLongFunction tests; conceptually widens to Function and narrows to LongUnaryOperator, LongToIntFunction etc.. + @Test + def test_ToLongFunction(): Unit = { + { + val fnl1 = new ToLongFunction[String]{ def applyAsLong(s: String) = s.length.toLong << 16 } + val sfnl = (s: String) => s.length.toLong << 16 + def jf1(tdf: ToLongFunction[String])(s: String) = tdf.applyAsLong(s) + def sf1(f: String => Long)(s: String) = f(s) + val ans = jf1(fnl1)(str) + assertEquals(ans, sf1(sfnl)(str)) + assertEquals(ans, jf1(sfnl.asJava)(str)) + assertEquals(ans, sf1(fnl1.asScala)(str)) + assertEquals(ans, jf1(asJavaToLongFunction(sfnl))(str)) + assertEquals(ans, sf1(asScalaFromToLongFunction(fnl1))(str)) + } + { + val fnl2 = new ToLongFunction[Long]{ def applyAsLong(l: Long) = 2 - l } + val zfnl = (x: Long) => 2 - x + def jf2(tdf: ToLongFunction[Long])(x: Long) = tdf.applyAsLong(x) + def sf2(f: Long => Long)(x: Long) = f(x) + val ans = jf2(fnl2)(num) + assertEquals(ans, sf2(zfnl)(num)) + // assertEquals(ans, jf2(znfd.asJava)(num)) -- LongUnaryOperator + assertEquals(ans, sf2(asScalaFromToLongFunction(fnl2))(num)) + assertEquals(ans, jf2(asJavaToLongFunction(zfnl))(num)) + } + } + + // UnaryOperator tests; actually widens to Function and conceptually narrows to IntUnaryOperator etc.. 
+ @Test + def test_UnaryOperator(): Unit = { + { + val uop1 = new UnaryOperator[String]{ def apply(s: String) = s.toUpperCase } + val suop = (s: String) => s.toUpperCase + def foo(uo: UnaryOperator[String])(s: String) = uo(s) + def bar(f: String => String)(s: String) = foo(f.asJava)(s) + def baz(uo: UnaryOperator[String])(s: String) = bar(uo.asScala)(s) + assertEquals(foo(uop1)(str), bar(suop)(str)) + assertEquals(foo(uop1)(str), baz(uop1)(str)) + assertEquals(foo(uop1)(str), bar(asScalaFromUnaryOperator(uop1))(str)) + assertEquals(foo(uop1)(str), baz(asJavaUnaryOperator(suop))(str)) + } + { + val uop2 = new UnaryOperator[Int]{ def apply(i: Int) = -i } + def zuop = (i: Int) => -i + def foo(uo: UnaryOperator[Int])(i: Int) = uo(i) + def bar(f: Int => Int)(i: Int) = foo(asJavaUnaryOperator(f))(i) // .asScala gives IntUnaryOperator + def baz(uo: UnaryOperator[Int])(i: Int) = bar(uo.asScala)(i) + assertEquals(foo(uop2)(num), bar(zuop)(num)) + assertEquals(foo(uop2)(num), baz(uop2)(num)) + assertEquals(foo(uop2)(num), bar(asScalaFromUnaryOperator(uop2))(num)) + } + } +} diff --git a/src/test/scala/scala/compat/java8/Issue247Test.scala b/src/test/scala/scala/compat/java8/Issue247Test.scala new file mode 100644 index 0000000..6a2ddc0 --- /dev/null +++ b/src/test/scala/scala/compat/java8/Issue247Test.scala @@ -0,0 +1,83 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.compat.java8 + +import org.junit.Assert._ +import org.junit.Assume.assumeTrue +import org.junit.Test +import org.junit.function.ThrowingRunnable + +import java.nio.file.{Files, Paths} +import scala.compat.java8.StreamConverters._ +import scala.compat.java8.issue247.Main +import scala.sys.process._ +import scala.util.Try + +class Issue247Test { + @Test + def runMainDirectly(): Unit = Main.main(Array.empty) + + val mainCls = "scala.compat.java8.issue247.Main" + + @Test + def runMainMatrix(): Unit = { + assumeTrue("only run in Linux/OSX", "which which".! == 0) + + val pwd = "pwd".!!.trim + + val coursier = Try { + ("which cs" #|| "which coursier").!!.trim + }.getOrElse { + val cs = s"$pwd/target/coursier" + if (!Files.isExecutable(Paths.get(cs))) + ( s"curl -fLo $cs https://git.io/coursier-cli" #&& + s"chmod +x $cs" + ).!.ensuring(_ == 0) + cs + } + + for { + scalaBinV <- Seq("2.11", "2.12", "2.13", "3") + compatV <- Seq("0.9.1", "1.0.0", "1.0.1") + // scala-java8-compat for scala3 don't have version 0.9.1 + if scalaBinV != "3" || compatV != "0.9.1" + scalaDir <- Files.list(Paths.get(pwd, "target")).toScala[List] + if scalaDir.toFile.getName.startsWith(s"scala-$scalaBinV") + classesDir = scalaDir.resolve("test-classes") + if classesDir.resolve("scala/compat/java8/issue247/Main.class").toFile.isFile + } { + val classpath = Process( + Seq( + coursier, // may contain spaces + "fetch", "--classpath", + s"org.scala-lang.modules:scala-java8-compat_$scalaBinV:$compatV" + ) + ).!!.trim + + val testCmd = s"java -cp $classpath:$classesDir $mainCls" + + val run: ThrowingRunnable = new ThrowingRunnable { + def run(): Unit = { + println(testCmd) + testCmd.!! 
+ } + } + + if ((scalaBinV, compatV) == ("2.13", "0.9.1")) { + run.run() // no Exception + } else { + assertThrows(classOf[RuntimeException], run) + } + } + } +} diff --git a/src/test/scala/scala/compat/java8/issue247/Main.scala b/src/test/scala/scala/compat/java8/issue247/Main.scala new file mode 100644 index 0000000..25fcc29 --- /dev/null +++ b/src/test/scala/scala/compat/java8/issue247/Main.scala @@ -0,0 +1,27 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.compat.java8.issue247 + +import scala.compat.java8.FunctionConverters._ +import java.util.function.IntFunction + +object Main { + def invoke(jfun: IntFunction[String]): String = jfun(2) + + def main(args: Array[String]): Unit = { + val sfun = (i: Int) => s"ret: $i" + val ret = invoke(sfun.asJava) + assert(ret == "ret: 2") + println(s"OK. $ret") + } +}