From 04e8de2cf07e3eb22fe32d14b16f36e0bdcd8f5a Mon Sep 17 00:00:00 2001
From: Anita SV
Date: Wed, 31 Jan 2024 12:38:54 -0800
Subject: [PATCH] increase hash map size to speed up 10K data set

---
 src/main/java/dev/morling/onebrc/CalculateAverage_vaidhy.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/main/java/dev/morling/onebrc/CalculateAverage_vaidhy.java b/src/main/java/dev/morling/onebrc/CalculateAverage_vaidhy.java
index 6bda1f0f1..b06cffe95 100644
--- a/src/main/java/dev/morling/onebrc/CalculateAverage_vaidhy.java
+++ b/src/main/java/dev/morling/onebrc/CalculateAverage_vaidhy.java
@@ -527,7 +527,7 @@ public long address() {
     private static class ChunkProcessorImpl implements MapReduce {

         // 1 << 14 > 10,000 so it works
-        private final PrimitiveHashMap statistics = new PrimitiveHashMap(14);
+        private final PrimitiveHashMap statistics = new PrimitiveHashMap(15);

         @Override
         public void process(long keyStartAddress, long keyEndAddress, long hash, long suffix, int temperature) {
@@ -573,7 +573,7 @@ private static Map toPrintMap(Map

     private static Map combineOutputs(
             List list) {
-        Map output = new HashMap<>(10000);
+        Map output = HashMap.newHashMap(10000);
         for (PrimitiveHashMap map : list) {
             for (HashEntry entry : map.entrySet()) {
                 if (entry.value != null) {
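
Note (not part of the patch): a minimal standalone sketch of the sizing arithmetic behind the change, assuming the 10K data set holds about 10,000 distinct station keys. With 1 << 14 = 16,384 slots the per-chunk table runs at a load factor of roughly 0.61; 1 << 15 = 32,768 slots brings it down to roughly 0.31, which shortens probe sequences. HashMap.newHashMap(n), available since Java 19, sizes the backing table so that n mappings fit under the default 0.75 load factor without rehashing, whereas new HashMap<>(n) treats n as the raw initial capacity. The class and key names below are illustrative, not taken from CalculateAverage_vaidhy.java.

import java.util.HashMap;
import java.util.Map;

public class HashMapSizingSketch {

    public static void main(String[] args) {
        int keys = 10_000; // approximate number of distinct stations in the 10K data set

        // Load factor of a fixed, power-of-two-sized table holding 10,000 entries.
        for (int shift : new int[]{ 14, 15 }) {
            int slots = 1 << shift;
            System.out.printf("1 << %d = %,d slots -> load factor %.2f%n",
                    shift, slots, (double) keys / slots);
        }

        // Java 19+: newHashMap(n) picks an initial capacity large enough that
        // n mappings fit under the default 0.75 load factor without rehashing.
        Map<String, Integer> output = HashMap.newHashMap(keys);
        for (int i = 0; i < keys; i++) {
            output.put("station-" + i, i);
        }
        System.out.println("mappings stored: " + output.size());
    }
}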