[Benchmark] A set of Java Stream timings that left me with doubts

import java.util.stream.LongStream;
import java.util.stream.Stream;

public class Test {

    public static void main(String[] args) throws InterruptedException {

        int n = 100000000;
        long result = 0;

        long begin, end;

        // 1. Plain for loop over primitive longs
        Thread.sleep(2000);
        begin = System.currentTimeMillis();
        for (long i = 1L; i <= n; i++) {
            result += i;
        }
        end = System.currentTimeMillis();
        System.out.println("for loop result = " + result + " time: " + (end - begin));

        // 2. Primitive LongStream over a closed range
        Thread.sleep(2000);
        begin = System.currentTimeMillis();
        result = LongStream.rangeClosed(1, n)
                .sum();
        end = System.currentTimeMillis();
        System.out.println("LongStream sum result = " + result + " time: " + (end - begin));

        // 3. Boxed Stream<Long> built with iterate + limit
        Thread.sleep(2000);
        begin = System.currentTimeMillis();
        result = Stream.iterate(1L, i -> i + 1)
                .limit(n)
                .reduce(0L, Long::sum);
        end = System.currentTimeMillis();
        System.out.println("Stream iterate result = " + result + " time: " + (end - begin));

        // 4. Same boxed iterate pipeline, run in parallel
        Thread.sleep(2000);
        begin = System.currentTimeMillis();
        result = Stream.iterate(1L, i -> i + 1)
                .parallel()
                .limit(n)
                .reduce(0L, Long::sum);
        end = System.currentTimeMillis();
        System.out.println("Stream iterate parallel result = " + result + " time: " + (end - begin));
    }
}

Execution results:

for loop result = 5000000050000000 time: 34
LongStream sum result = 5000000050000000 time: 92
Stream iterate result = 5000000050000000 time: 1153
Stream iterate parallel result = 5000000050000000 time: 25224

Looking at these numbers, I'm not sure whether I wrote the benchmark wrong or whether this simply isn't the right scenario for streams.

Do streams only show their advantage when the data volume is truly huge?
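
One more thing I wonder about: a single timing with System.currentTimeMillis() and no warm-up is heavily affected by JIT compilation, so the absolute numbers above may be noisy. A rough sketch of what I mean (the BenchUtil class and the bestOf helper are just hypothetical illustrations, not part of my original test), repeating each measurement and keeping the fastest run:

import java.util.stream.LongStream;

public class BenchUtil {

    // Hypothetical helper, not in the original test: run a task several times
    // and report the fastest wall-clock time to smooth out JIT warm-up.
    static long bestOf(int runs, Runnable task) {
        long best = Long.MAX_VALUE;
        for (int r = 0; r < runs; r++) {
            long begin = System.currentTimeMillis();
            task.run();
            best = Math.min(best, System.currentTimeMillis() - begin);
        }
        return best;
    }

    public static void main(String[] args) {
        // Example: time the LongStream sum over the same 100 million elements.
        System.out.println("LongStream best time: "
                + bestOf(5, () -> LongStream.rangeClosed(1, 100_000_000).sum()));
    }
}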

When I changed the sum to run from 1 to 1 billion, it threw an exception.

for loop result = 500000000500000000 time: 390
LongStream sum result = 500000000500000000 time: 450
Stream iterate result = 500000000500000000 time: 9366
Exception in thread "main" java.lang.OutOfMemoryError
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at java.util.concurrent.ForkJoinTask.getThrowableException(ForkJoinTask.java:598)
	at java.util.concurrent.ForkJoinTask.reportException(ForkJoinTask.java:677)
	at java.util.concurrent.ForkJoinTask.invoke(ForkJoinTask.java:735)
	at java.util.stream.SliceOps$1.opEvaluateParallelLazy(SliceOps.java:155)
	at java.util.stream.AbstractPipeline.sourceSpliterator(AbstractPipeline.java:432)
	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:233)
	at java.util.stream.ReferencePipeline.reduce(ReferencePipeline.java:474)
	at com.seventh.hospital.app.controller.Test.main(Test.java:65)
Caused by: java.lang.OutOfMemoryError: Java heap space
	at java.lang.Long.valueOf(Long.java:840)
	at com.seventh.hospital.app.controller.Test.lambda$main$1(Test.java:62)
	at com.seventh.hospital.app.controller.Test$$Lambda$4/1651191114.apply(Unknown Source)
	at java.util.stream.Stream$1.next(Stream.java:1033)
	at java.util.Spliterators$IteratorSpliterator.trySplit(Spliterators.java:1784)
	at java.util.stream.AbstractShortCircuitTask.compute(AbstractShortCircuitTask.java:114)
	at java.util.concurrent.CountedCompleter.exec(CountedCompleter.java:731)
	at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
	at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
	at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
	at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
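
My current guess about the crash: Stream.iterate produces boxed Long values from a spliterator of unknown size, so splitting it for parallel execution (while still enforcing limit(n) on an ordered stream) buffers large batches of boxed elements, which matches the IteratorSpliterator.trySplit frame where the heap runs out. If that is right, a parallel version built on a primitive, sized range should behave very differently. A minimal sketch under that assumption (ParallelRangeSum is my own illustration, not code from the test above):

import java.util.stream.LongStream;

public class ParallelRangeSum {

    public static void main(String[] args) {
        long n = 1_000_000_000L;

        long begin = System.currentTimeMillis();
        // rangeClosed is a SIZED primitive stream: it splits evenly across the
        // common ForkJoin pool and never boxes individual long values.
        long result = LongStream.rangeClosed(1, n)
                .parallel()
                .sum();
        long end = System.currentTimeMillis();

        System.out.println("LongStream parallel result = " + result + " time: " + (end - begin));
    }
}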

Additions and corrections are welcome.