mirror of
https://github.com/bisq-network/bisq.git
synced 2024-11-19 09:52:23 +01:00
Always try to prune live map with historical data
It should only be needed in case we get the historical data from resources, but as I have seen multiple times that some nodes have duplicated entries in the live data, I think it's safer to always clean up. If no entries are removed, the call is very cheap. Even with 60k entries to be pruned it takes only about 20 ms.
This commit is contained in:
parent
eea1203ec5
commit
23df53e61b
@ -189,9 +189,7 @@ public abstract class HistoricalDataStoreService<T extends PersistableNetworkPay
|
|||||||
storesByVersion.put(version, persisted);
|
storesByVersion.put(version, persisted);
|
||||||
allHistoricalPayloads.putAll(persisted.getMap());
|
allHistoricalPayloads.putAll(persisted.getMap());
|
||||||
log.info("We have read from {} {} historical items.", fileName, persisted.getMap().size());
|
log.info("We have read from {} {} historical items.", fileName, persisted.getMap().size());
|
||||||
if (wasCreatedFromResources) {
|
pruneStore(persisted, version);
|
||||||
pruneStore(persisted, version);
|
|
||||||
}
|
|
||||||
completeHandler.run();
|
completeHandler.run();
|
||||||
},
|
},
|
||||||
() -> {
|
() -> {
|
||||||
|
Loading…
Reference in New Issue
Block a user