diff --git a/desktop/src/main/java/bisq/desktop/components/chart/ChartDataModel.java b/desktop/src/main/java/bisq/desktop/components/chart/ChartDataModel.java
index 18875498bc..236bd7085b 100644
--- a/desktop/src/main/java/bisq/desktop/components/chart/ChartDataModel.java
+++ b/desktop/src/main/java/bisq/desktop/components/chart/ChartDataModel.java
@@ -23,7 +23,9 @@
 import java.time.Instant;
 import java.time.temporal.TemporalAdjuster;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.BinaryOperator;
+import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.function.ToLongFunction;
 import java.util.stream.Collectors;
@@ -89,13 +91,20 @@ public abstract class ChartDataModel extends ActivatableDataModel {
 
     protected abstract void invalidateCache();
 
-    protected Map<Long, Long> getMergedMap(Map<Long, Long> map1,
-                                           Map<Long, Long> map2,
-                                           BinaryOperator<Long> mergeFunction) {
-        return Stream.concat(map1.entrySet().stream(),
-                map2.entrySet().stream())
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        Map.Entry::getValue,
-                        mergeFunction));
+
+    ///////////////////////////////////////////////////////////////////////////////////////////
+    // Utils
+    ///////////////////////////////////////////////////////////////////////////////////////////
+
+    protected static <T, R> Function<T, R> memoize(Function<T, R> fn) {
+        Map<T, R> map = new ConcurrentHashMap<>();
+        return x -> map.computeIfAbsent(x, fn);
+    }
+
+    protected static <V> Map<Long, V> getMergedMap(Map<Long, V> map1,
+                                                   Map<Long, V> map2,
+                                                   BinaryOperator<V> mergeFunction) {
+        return Stream.concat(map1.entrySet().stream(), map2.entrySet().stream())
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, mergeFunction));
     }
 }
diff --git a/desktop/src/main/java/bisq/desktop/main/dao/economy/dashboard/price/PriceChartDataModel.java b/desktop/src/main/java/bisq/desktop/main/dao/economy/dashboard/price/PriceChartDataModel.java
index 8880a352ce..b78534a1ac 100644
--- a/desktop/src/main/java/bisq/desktop/main/dao/economy/dashboard/price/PriceChartDataModel.java
+++ b/desktop/src/main/java/bisq/desktop/main/dao/economy/dashboard/price/PriceChartDataModel.java
@@ -32,14 +32,12 @@ import javax.inject.Inject;
 import java.time.Instant;
 import java.util.AbstractMap;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicReference;
+import java.util.NavigableMap;
+import java.util.TreeMap;
 import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
@@ -47,8 +45,6 @@ import java.util.stream.Stream;
 
 import lombok.extern.slf4j.Slf4j;
 
-import org.jetbrains.annotations.NotNull;
-
 @Slf4j
 public class PriceChartDataModel extends ChartDataModel {
     private final TradeStatisticsManager tradeStatisticsManager;
@@ -266,7 +262,7 @@
     }
 
     private Map<Long, Double> getBsqMarketCapByInterval(Predicate<TradeStatistics3> collectionFilter,
-                                                            Function<List<TradeStatistics3>, Double> getAveragePriceFunction) {
+                                                        Function<List<TradeStatistics3>, Double> getAveragePriceFunction) {
         var toTimeIntervalFn = toCachedTimeIntervalFn();
         return getBsqMarketCapByInterval(tradeStatisticsManager.getObservableTradeStatisticsSet(),
                 collectionFilter,
@@ -276,92 +272,58 @@
     private Map<Long, Double> getBsqMarketCapByInterval(Collection<TradeStatistics3> tradeStatistics3s,
-                                                            Predicate<TradeStatistics3> collectionFilter,
-                                                            Function<TradeStatistics3, Long> groupByDateFunction,
-                                                            Predicate<Long> dateFilter,
-                                                            Function<List<TradeStatistics3>, Double> getAveragePriceFunction) {
-
+                                                        Predicate<TradeStatistics3> collectionFilter,
+                                                        Function<TradeStatistics3, Long> groupByDateFunction,
+                                                        Predicate<Long> dateFilter,
+                                                        Function<List<TradeStatistics3>, Double> getAveragePriceFunction) {
         Map<Long, List<TradeStatistics3>> pricesGroupedByDate = tradeStatistics3s.stream()
                 .filter(collectionFilter)
                 .collect(Collectors.groupingBy(groupByDateFunction));
 
-        Stream<Map.Entry<Long, List<TradeStatistics3>>> filteredByDate =
-                pricesGroupedByDate.entrySet().stream()
-                        .filter(entry -> dateFilter.test(entry.getKey()));
+        Stream<Map.Entry<Long, List<TradeStatistics3>>> filteredByDate = pricesGroupedByDate.entrySet().stream()
+                .filter(entry -> dateFilter.test(entry.getKey()));
 
         Map<Long, Double> resultsByDateBucket = filteredByDate
                 .map(entry -> new AbstractMap.SimpleEntry<>(
                         entry.getKey(),
                         getAveragePriceFunction.apply(entry.getValue())))
                 .filter(e -> e.getValue() > 0d)
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (u, v) -> v, HashMap::new));
 
         // apply the available BSQ to the data set
-        Map<Long, Double> totalSupplyByInterval = getOutstandingBsqByInterval();
-        resultsByDateBucket.keySet().forEach(dateKey -> {
-            Double availableBsq = issuanceAsOfDate(totalSupplyByInterval, dateKey)/100;
-            resultsByDateBucket.put(dateKey, resultsByDateBucket.get(dateKey) * availableBsq); // market cap (price * available BSQ)
+        NavigableMap<Long, Double> totalSupplyByInterval = getOutstandingBsqByInterval();
+        resultsByDateBucket.replaceAll((dateKey, result) -> {
+            double availableBsq = issuanceAsOfDate(totalSupplyByInterval, dateKey) / 100d;
+            return result * availableBsq; // market cap (price * available BSQ)
         });
         return resultsByDateBucket;
     }
 
-    private Double issuanceAsOfDate(@NotNull Map<Long, Double> totalSupplyByInterval, Long dateKey) {
-        ArrayList<Long> list = new ArrayList<>(totalSupplyByInterval.keySet());
-        list.sort(Collections.reverseOrder());
-        Optional<Long> foundKey = list.stream()
-                .filter(d -> dateKey >= d)
-                .findFirst();
-        if (foundKey.isPresent()) {
-            return totalSupplyByInterval.get(foundKey.get());
-        }
-        return 0.0;
+    private double issuanceAsOfDate(NavigableMap<Long, Double> totalSupplyByInterval, long dateKey) {
+        var entry = totalSupplyByInterval.floorEntry(dateKey);
+        return entry != null ? entry.getValue() : 0d;
     }
 
-    private Map<Long, Double> getOutstandingBsqByInterval() {
+    private NavigableMap<Long, Double> getOutstandingBsqByInterval() {
         Stream<Tx> txStream = daoStateService.getBlocks().stream()
                 .flatMap(b -> b.getTxs().stream())
                 .filter(tx -> tx.getBurntFee() > 0);
         Map<Long, Double> simpleBurns = txStream
-                .collect(Collectors.groupingBy(tx ->
-                        toTimeInterval(Instant.ofEpochMilli(tx.getTime()))))
-                .entrySet()
-                .stream()
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        entry -> entry.getValue().stream()
-                                .mapToDouble(Tx::getBurntBsq)
-                                .sum()));
-        simpleBurns.forEach((k,v) -> simpleBurns.put(k, -v));
+                .collect(Collectors.groupingBy(
+                        tx -> toTimeInterval(Instant.ofEpochMilli(tx.getTime())),
+                        Collectors.summingDouble(Tx::getBurntBsq)));
+        simpleBurns.replaceAll((k, v) -> -v);
 
         Collection<Issuance> issuanceSet = daoStateService.getIssuanceItems();
         Map<Long, Double> simpleIssuance = issuanceSet.stream()
-                .collect(Collectors.groupingBy(issuance ->
-                        toTimeInterval(Instant.ofEpochMilli(blockTimeOfIssuanceFunction.apply(issuance)))))
-                .entrySet()
-                .stream()
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        entry -> entry.getValue().stream()
-                                .mapToDouble(Issuance::getAmount)
-                                .sum()));
+                .collect(Collectors.groupingBy(
+                        issuance -> toTimeInterval(Instant.ofEpochMilli(blockTimeOfIssuanceFunction.apply(issuance))),
+                        Collectors.summingDouble(Issuance::getAmount)));
 
-        Map<Long, Double> supplyByInterval = Stream.concat(simpleIssuance.entrySet().stream(),
-                simpleBurns.entrySet().stream())
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        Map.Entry::getValue,
-                        Double::sum));
+        NavigableMap<Long, Double> supplyByInterval = new TreeMap<>(getMergedMap(simpleIssuance, simpleBurns, Double::sum));
 
-        ArrayList<Long> listCombined = new ArrayList<>(supplyByInterval.keySet());
-        Collections.sort(listCombined);
-        AtomicReference<Double> atomicSum = new AtomicReference<>((double) (daoStateService.getGenesisTotalSupply().value));
-        listCombined.forEach(k -> supplyByInterval.put(k, atomicSum.accumulateAndGet(supplyByInterval.get(k), Double::sum)));
+        final double[] partialSum = {daoStateService.getGenesisTotalSupply().value};
+        supplyByInterval.replaceAll((k, v) -> partialSum[0] += v);
 
         return supplyByInterval;
     }
-
-    ///////////////////////////////////////////////////////////////////////////////////////////
-    // Utils
-    ///////////////////////////////////////////////////////////////////////////////////////////
-
-    private static <T, R> Function<T, R> memoize(Function<T, R> fn) {
-        Map<T, R> map = new ConcurrentHashMap<>();
-        return x -> map.computeIfAbsent(x, fn);
-    }
 }
diff --git a/desktop/src/main/java/bisq/desktop/main/dao/economy/supply/dao/DaoChartDataModel.java b/desktop/src/main/java/bisq/desktop/main/dao/economy/supply/dao/DaoChartDataModel.java
index 31df6cea73..267aa82e02 100644
--- a/desktop/src/main/java/bisq/desktop/main/dao/economy/supply/dao/DaoChartDataModel.java
+++ b/desktop/src/main/java/bisq/desktop/main/dao/economy/supply/dao/DaoChartDataModel.java
@@ -40,8 +40,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TimeZone;
 import java.util.TreeMap;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
@@ -146,9 +144,7 @@
         Map<Long, Long> reimbursementMap = getReimbursementByInterval();
         Map<Long, Long> burnFromArbitrationMap = getProofOfBurnFromArbitrationByInterval();
         Map<Long, Long> mergedMap = getMergedMap(reimbursementMap, burnFromArbitrationMap, (a, b) -> a - b);
-        arbitrationDiffByInterval = mergedMap.entrySet().stream()
-                .filter(e -> getPostTagDateFilter().test(e.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        arbitrationDiffByInterval = getDateFilteredMap(mergedMap, getPostTagDateFilter());
         return arbitrationDiffByInterval;
     }
 
@@ -162,9 +158,7 @@
         Map<Long, Long> tradeFee = getBsqTradeFeeByInterval();
         Map<Long, Long> proofOfBurn = getProofOfBurnFromBtcFeesByInterval();
         Map<Long, Long> merged = getMergedMap(tradeFee, proofOfBurn, Long::sum);
-        totalTradeFeesByInterval = merged.entrySet().stream()
-                .filter(entry -> entry.getKey() * 1000 >= TAG_DATE.getTimeInMillis())
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        totalTradeFeesByInterval = getDateFilteredMap(merged, e -> e * 1000 >= TAG_DATE.getTimeInMillis());
         return totalTradeFeesByInterval;
     }
 
@@ -207,9 +201,7 @@
             return reimbursementByIntervalAfterTagging;
         }
 
-        reimbursementByIntervalAfterTagging = getReimbursementByInterval().entrySet().stream()
-                .filter(e -> getPostTagDateFilter().test(e.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        reimbursementByIntervalAfterTagging = getDateFilteredMap(getReimbursementByInterval(), getPostTagDateFilter());
         return reimbursementByIntervalAfterTagging;
     }
 
@@ -236,9 +228,7 @@
             return bsqTradeFeeByIntervalAfterTagging;
         }
 
-        bsqTradeFeeByIntervalAfterTagging = getBsqTradeFeeByInterval().entrySet().stream()
-                .filter(e -> getPostTagDateFilter().test(e.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        bsqTradeFeeByIntervalAfterTagging = getDateFilteredMap(getBsqTradeFeeByInterval(), getPostTagDateFilter());
        return bsqTradeFeeByIntervalAfterTagging;
     }
 
@@ -256,17 +246,14 @@
             return miscBurnByInterval;
         }
 
-        miscBurnByInterval = daoStateService.getBurntFeeTxs().stream()
+        Map<Long, Long> allMiscBurnByInterval = getBurntFeeTxStream()
                 .filter(e -> e.getTxType() != TxType.PAY_TRADE_FEE)
                 .filter(e -> e.getTxType() != TxType.PROOF_OF_BURN)
-                .collect(Collectors.groupingBy(tx -> toTimeInterval(Instant.ofEpochMilli(tx.getTime()))))
-                .entrySet()
-                .stream()
-                .filter(entry -> dateFilter.test(entry.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        entry -> entry.getValue().stream()
-                                .mapToLong(Tx::getBurntBsq)
-                                .sum()));
+                .collect(Collectors.groupingBy(
+                        tx -> toTimeInterval(Instant.ofEpochMilli(tx.getTime())),
+                        Collectors.summingLong(Tx::getBurntBsq)));
+
+        miscBurnByInterval = getDateFilteredMap(allMiscBurnByInterval, dateFilter);
         return miscBurnByInterval;
     }
 
@@ -308,21 +295,14 @@
         Collection<Issuance> issuanceSetForType = daoStateService.getIssuanceItems();
         // get all issued and burnt BSQ, not just the filtered date range
         Map<Long, Long> tmpIssuedByInterval = getIssuedBsqByInterval(issuanceSetForType, e -> true);
-        Map<Long, Long> tmpBurnedByInterval = new TreeMap<>(getBurntBsqByInterval(getBurntFeeTxStream(), e -> true)
-                .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> -e.getValue())));
-        Map<Long, Long> tmpSupplyByInterval = getMergedMap(tmpIssuedByInterval, tmpBurnedByInterval, Long::sum);
+        Map<Long, Long> tmpBurnedByInterval = getBurntBsqByInterval(getBurntFeeTxStream(), e -> true);
+        tmpBurnedByInterval.replaceAll((k, v) -> -v);
 
-        totalSupplyByInterval = new TreeMap<>(tmpSupplyByInterval.entrySet().stream()
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
-        AtomicReference<Long> atomicSum = new AtomicReference<>(genesisValue);
-        totalSupplyByInterval.entrySet().forEach(e -> e.setValue(
-                atomicSum.accumulateAndGet(e.getValue(), Long::sum)
-        ));
+        Map<Long, Long> tmpSupplyByInterval = new TreeMap<>(getMergedMap(tmpIssuedByInterval, tmpBurnedByInterval, Long::sum));
+        final long[] partialSum = {genesisValue};
+        tmpSupplyByInterval.replaceAll((k, v) -> partialSum[0] += v);
 
         // now apply the requested date filter
-        totalSupplyByInterval = totalSupplyByInterval.entrySet().stream()
-                .filter(e -> dateFilter.test(e.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-
+        totalSupplyByInterval = getDateFilteredMap(tmpSupplyByInterval, dateFilter);
         return totalSupplyByInterval;
     }
 
@@ -332,8 +312,8 @@
         }
 
         Map<Long, Long> issued = getTotalIssuedByInterval();
-        Map<Long, Long> burned = new TreeMap<>(getTotalBurnedByInterval().entrySet().stream()
-                .collect(Collectors.toMap(Map.Entry::getKey, e -> -e.getValue())));
+        Map<Long, Long> burned = getTotalBurnedByInterval();
+        burned.replaceAll((k, v) -> -v);
         supplyChangeByInterval = getMergedMap(issued, burned, Long::sum);
         return supplyChangeByInterval;
     }
@@ -343,21 +323,15 @@
 
     ///////////////////////////////////////////////////////////////////////////////////////////
 
     private Map<Long, Long> getIssuedBsqByInterval(Collection<Issuance> issuanceSet, Predicate<Long> dateFilter) {
-        return issuanceSet.stream()
-                .collect(Collectors.groupingBy(issuance ->
-                        toTimeInterval(Instant.ofEpochMilli(blockTimeOfIssuanceFunction.apply(issuance)))))
-                .entrySet()
-                .stream()
-                .filter(entry -> dateFilter.test(entry.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        entry -> entry.getValue().stream()
-                                .mapToLong(Issuance::getAmount)
-                                .sum()));
+        var allIssuedBsq = issuanceSet.stream()
+                .collect(Collectors.groupingBy(
+                        issuance -> toTimeInterval(Instant.ofEpochMilli(blockTimeOfIssuanceFunction.apply(issuance))),
+                        Collectors.summingLong(Issuance::getAmount)));
+        return getDateFilteredMap(allIssuedBsq, dateFilter);
     }
 
     private Map<Long, Long> getHistoricalIssuedBsqByInterval(Map<Long, Long> historicalData, Predicate<Long> dateFilter) {
-
         return historicalData.entrySet().stream()
                 .filter(e -> dateFilter.test(e.getKey()))
                 .collect(Collectors.toMap(e -> toTimeInterval(Instant.ofEpochSecond(e.getKey())),
@@ -367,15 +341,10 @@
 
     private Map<Long, Long> getBurntBsqByInterval(Stream<Tx> txStream, Predicate<Long> dateFilter) {
         var toTimeIntervalFn = toCachedTimeIntervalFn();
-        return txStream
-                .collect(Collectors.groupingBy(tx -> toTimeIntervalFn.applyAsLong(Instant.ofEpochMilli(tx.getTime()))))
-                .entrySet()
-                .stream()
-                .filter(entry -> dateFilter.test(entry.getKey()))
-                .collect(Collectors.toMap(Map.Entry::getKey,
-                        entry -> entry.getValue().stream()
-                                .mapToLong(Tx::getBurntBsq)
-                                .sum()));
+        var allBurntBsq = txStream.collect(Collectors.groupingBy(
+                tx -> toTimeIntervalFn.applyAsLong(Instant.ofEpochMilli(tx.getTime())),
+                Collectors.summingLong(Tx::getBurntBsq)));
+        return getDateFilteredMap(allBurntBsq, dateFilter);
     }
 
     private Predicate<Long> getPostTagDateFilter() {
@@ -402,9 +371,10 @@
     // Utils
     ///////////////////////////////////////////////////////////////////////////////////////////
 
-    private static <T, R> Function<T, R> memoize(Function<T, R> fn) {
-        Map<T, R> map = new ConcurrentHashMap<>();
-        return x -> map.computeIfAbsent(x, fn);
+    private static Map<Long, Long> getDateFilteredMap(Map<Long, Long> map, Predicate<Long> dateFilter) {
+        return map.entrySet().stream()
+                .filter(e -> dateFilter.test(e.getKey()))
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (u, v) -> v, HashMap::new));
     }