Merge pull request #4695 from chimp1984/cleanup-monitor-module

Cleanup monitor module
sqrrm 2020-10-27 19:55:59 +01:00 committed by GitHub
commit 91bc88c86c
10 changed files with 45 additions and 31 deletions


@@ -81,6 +81,7 @@ public class Monitor {
// start Tor
Tor.setDefault(new NativeTor(TOR_WORKING_DIR, null, null, false));
//noinspection deprecation,deprecation,deprecation,deprecation,deprecation,deprecation,deprecation,deprecation
Capabilities.app.addAll(Capability.TRADE_STATISTICS,
Capability.TRADE_STATISTICS_2,
Capability.ACCOUNT_AGE_WITNESS,


@@ -17,13 +17,13 @@
package bisq.monitor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.LongSummaryStatistics;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Calculates average, max, min, p25, p50, p75 off of a list of samples and
@@ -35,7 +35,7 @@ public class StatisticsHelper {
public static Map<String, String> process(Collection<Long> input) {
List<Long> samples = input.stream().collect(Collectors.toList());
List<Long> samples = new ArrayList<>(input);
// aftermath
Collections.sort(samples);
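For orientation, a minimal sketch of the percentile step StatisticsHelper performs on the sorted copy; the nearest-rank formula and the names below are illustrative assumptions, not the module's exact code:

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

class PercentileSketch {
    // Nearest-rank percentile of an already sorted, non-empty list (assumed formula).
    static long percentile(List<Long> sorted, double p) {
        int index = (int) Math.ceil(p / 100.0 * sorted.size()) - 1;
        return sorted.get(Math.max(0, index));
    }

    static void demo(Collection<Long> input) {
        // Copy defensively, as the refactored line above does, then sort once.
        List<Long> samples = new ArrayList<>(input);
        Collections.sort(samples);
        System.out.println("p25=" + percentile(samples, 25)
                + " p50=" + percentile(samples, 50)
                + " p75=" + percentile(samples, 75));
    }
}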


@@ -57,7 +57,7 @@ public class ThreadGate {
while (lock.getCount() > 0)
try {
if (!lock.await(60, TimeUnit.SECONDS)) {
log.warn("timeout occured!");
log.warn("timeout occurred!");
break; // break the loop
}
} catch (InterruptedException ignore) {
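The gate above waits in a loop on a CountDownLatch with a 60-second cap per attempt; a self-contained sketch of the same pattern (class and method names are stand-ins, not the monitor's actual ThreadGate API):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

class GateSketch {
    private final CountDownLatch lock = new CountDownLatch(1);

    // Blocks until the gate is opened elsewhere or the timeout elapses.
    void awaitOpen() {
        while (lock.getCount() > 0) {
            try {
                if (!lock.await(60, TimeUnit.SECONDS)) {
                    System.err.println("timeout occurred!");
                    break; // give up waiting
                }
            } catch (InterruptedException ignore) {
                // the surrounding while loop simply retries the await
            }
        }
    }

    void open() {
        lock.countDown();
    }
}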


@@ -44,14 +44,13 @@ import lombok.extern.slf4j.Slf4j;
*/
@Slf4j
public class MarketStats extends Metric {
private static final String MARKETS_BISQ_NETWORK = "https://markets.bisq.network";
// poor mans JSON parser
private final Pattern marketPattern = Pattern.compile("\"market\" ?: ?\"([a-z_]+)\"");
private final Pattern amountPattern = Pattern.compile("\"amount\" ?: ?\"([\\d\\.]+)\"");
private final Pattern volumePattern = Pattern.compile("\"volume\" ?: ?\"([\\d\\.]+)\"");
private final Pattern timestampPattern = Pattern.compile("\"trade_date\" ?: ?([\\d]+)");
private final String marketApi = "https://markets.bisq.network";
private Long lastRun = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(15));
public MarketStats(Reporter reporter) {
@@ -65,12 +64,12 @@ public class MarketStats extends Metric {
Map<String, String> result = new HashMap<>();
// assemble query
Long now = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
long now = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
String query = "/api/trades?format=json&market=all&timestamp_from=" + lastRun + "&timestamp_to=" + now;
lastRun = now; // thought about adding 1 second but what if a trade is done exactly in this one second?
// connect
URLConnection connection = new URL(marketApi + query).openConnection();
URLConnection connection = new URL(MARKETS_BISQ_NETWORK + query).openConnection();
// prepare to receive data
BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
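The "poor man's JSON parser" above extracts fields from the trades response with regular expressions instead of a JSON library; a small, simplified sketch of that extraction on a sample payload (record handling is reduced to market and amount):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class TradeLineSketch {
    private static final Pattern MARKET = Pattern.compile("\"market\" ?: ?\"([a-z_]+)\"");
    private static final Pattern AMOUNT = Pattern.compile("\"amount\" ?: ?\"([\\d\\.]+)\"");

    // e.g. parse("{\"market\":\"btc_eur\",\"amount\":\"0.25\"}") prints "btc_eur -> 0.25"
    static void parse(String json) {
        Matcher market = MARKET.matcher(json);
        Matcher amount = AMOUNT.matcher(json);
        // Walk both matchers in lock-step; the real metric also reads volume and trade_date.
        while (market.find() && amount.find()) {
            System.out.println(market.group(1) + " -> " + amount.group(1));
        }
    }
}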


@@ -34,6 +34,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -59,7 +60,7 @@ public class P2PMarketStats extends P2PSeedNodeSnapshotBase {
/**
* Efficient way to aggregate numbers.
*/
private class Aggregator {
private static class Aggregator {
private long value = 0;
synchronized long value() {
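Several helpers in this commit (Aggregator, OfferStatistics, Counter, FixedSizeHistoryTracker, MyStatistics, Tuple) become static nested classes. Unlike an inner class, a static nested class holds no implicit reference to its enclosing instance, which these helpers never needed; a minimal illustration:

class Outer {
    // Inner class: every instance silently holds a reference to an Outer instance.
    class InnerCounter {
        long value;
    }

    // Static nested class: no hidden Outer reference, can be created on its own.
    static class NestedCounter {
        long value;
    }

    static void demo() {
        NestedCounter ok = new NestedCounter(); // no Outer instance needed
        // InnerCounter needsOuter = new Outer().new InnerCounter(); // inner class requires an Outer
    }
}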
@@ -75,7 +76,7 @@ public class P2PMarketStats extends P2PSeedNodeSnapshotBase {
}
}
private abstract class OfferStatistics<T> extends Statistics<T> {
private abstract static class OfferStatistics<T> extends Statistics<T> {
@Override
public synchronized void log(Object message) {
if (message instanceof OfferPayload) {
@@ -171,14 +172,14 @@ public class P2PMarketStats extends P2PSeedNodeSnapshotBase {
int numberOfBins = 5;
// - get biggest offer
double max = input.stream().max(Long::compareTo).get() * 1.01;
double max = input.stream().max(Long::compareTo).map(value -> value * 1.01).orElse(0.0);
// - create histogram
input.stream().collect(
Collectors.groupingBy(aLong -> aLong == max ? (int) numberOfBins - 1 : (int) Math.floor(aLong / (max / numberOfBins)), Collectors.counting())).
Collectors.groupingBy(aLong -> aLong == max ? numberOfBins - 1 : (int) Math.floor(aLong / (max / numberOfBins)), Collectors.counting())).
forEach((integer, integer2) -> report.put(market + ".bin_" + integer, String.valueOf(integer2)));
report.put(market + ".number_of_bins", String.valueOf((int) numberOfBins));
report.put(market + ".number_of_bins", String.valueOf(numberOfBins));
report.put(market + ".max", String.valueOf((int) max));
}
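The histogram above sorts every offer amount into one of five equally wide bins spanning slightly past the largest offer; a standalone sketch of that binning with illustrative names (the last bin is clamped rather than matched by equality, a simplification):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class HistogramSketch {
    static Map<Integer, Long> bin(List<Long> amounts) {
        int numberOfBins = 5;
        // Stretch the range slightly so the largest offer still falls inside the last bin.
        double max = amounts.stream().max(Long::compareTo).map(v -> v * 1.01).orElse(0.0);
        return amounts.stream().collect(Collectors.groupingBy(
                amount -> Math.min(numberOfBins - 1, (int) Math.floor(amount / (max / numberOfBins))),
                Collectors.counting()));
    }
}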
@@ -223,7 +224,9 @@ public class P2PMarketStats extends P2PSeedNodeSnapshotBase {
// do version statistics
report.clear();
versionBucketsPerHost.values().stream().findAny().get().values().forEach((version, numberOfOccurrences) -> report.put(version, String.valueOf(numberOfOccurrences.value())));
Optional<Statistics<Aggregator>> optionalStatistics = versionBucketsPerHost.values().stream().findAny();
optionalStatistics.ifPresent(aggregatorStatistics -> aggregatorStatistics.values()
.forEach((version, numberOfOccurrences) -> report.put(version, String.valueOf(numberOfOccurrences.value()))));
reporter.report(report, "versions");
}
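The version report now goes through Optional.ifPresent instead of calling get() on a possibly empty findAny() result; the shape of that pattern in isolation, with stand-in types for the monitor's own:

import java.util.List;
import java.util.Map;
import java.util.Optional;

class OptionalSketch {
    static void report(List<Map<String, Long>> bucketsPerHost, Map<String, String> report) {
        // findAny() may return an empty Optional when no host has reported yet.
        Optional<Map<String, Long>> any = bucketsPerHost.stream().findAny();
        // ifPresent replaces the unconditional get(), so an empty result is simply skipped.
        any.ifPresent(buckets -> buckets.forEach(
                (version, occurrences) -> report.put(version, String.valueOf(occurrences))));
    }
}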


@@ -91,7 +91,7 @@ public class P2PNetworkLoad extends Metric implements MessageListener, SetupList
* History implementation using a {@link LinkedHashMap} and its
* {@link LinkedHashMap#removeEldestEntry(Map.Entry)} option.
*/
private class FixedSizeHistoryTracker<K, V> extends LinkedHashMap<K, V> {
private static class FixedSizeHistoryTracker<K, V> extends LinkedHashMap<K, V> {
final int historySize;
FixedSizeHistoryTracker(int historySize) {
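The tracker relies on the LinkedHashMap#removeEldestEntry hook named in the comment above to cap its size; a self-contained sketch of that idiom (the override body is an assumption based on the historySize field):

import java.util.LinkedHashMap;
import java.util.Map;

class FixedSizeHistorySketch<K, V> extends LinkedHashMap<K, V> {
    private final int historySize;

    FixedSizeHistorySketch(int historySize) {
        this.historySize = historySize;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Returning true evicts the oldest entry once the map grows past historySize.
        return size() > historySize;
    }
}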
@@ -123,7 +123,6 @@ public class P2PNetworkLoad extends Metric implements MessageListener, SetupList
hsReady.await();
// boot up P2P node
File storageDir = torHiddenServiceDir;
try {
Config config = new Config();
CorruptedStorageFileHandler corruptedStorageFileHandler = new CorruptedStorageFileHandler();
@@ -133,7 +132,7 @@ public class P2PNetworkLoad extends Metric implements MessageListener, SetupList
networkProtoResolver);
DefaultSeedNodeRepository seedNodeRepository = new DefaultSeedNodeRepository(config);
PeerManager peerManager = new PeerManager(networkNode, seedNodeRepository, new ClockWatcher(),
new PersistenceManager<>(storageDir, persistenceProtoResolver, corruptedStorageFileHandler), maxConnections);
new PersistenceManager<>(torHiddenServiceDir, persistenceProtoResolver, corruptedStorageFileHandler), maxConnections);
// init file storage
peerManager.readPersisted();
@@ -194,7 +193,7 @@ public class P2PNetworkLoad extends Metric implements MessageListener, SetupList
/**
* Efficient way to count message occurrences.
*/
private class Counter {
private static class Counter {
private int value = 1;
/**


@@ -40,7 +40,7 @@ import static com.google.common.base.Preconditions.checkNotNull;
public class P2PRoundTripTime extends P2PSeedNodeSnapshotBase {
private static final String SAMPLE_SIZE = "run.sampleSize";
private Map<Integer, Long> sentAt = new HashMap<>();
private final Map<Integer, Long> sentAt = new HashMap<>();
private Map<NodeAddress, Statistics> measurements = new HashMap<>();
public P2PRoundTripTime(Reporter reporter) {
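For context, a heavily hedged sketch of how a sentAt map keyed by message nonce can back a round-trip-time measurement; this illustrates only the pattern suggested by the field names above, not the class's actual message handling:

import java.util.HashMap;
import java.util.Map;

class RoundTripSketch {
    private final Map<Integer, Long> sentAt = new HashMap<>();

    // Remember when a ping with the given nonce left the monitor.
    void onSent(int nonce) {
        sentAt.put(nonce, System.currentTimeMillis());
    }

    // On the matching pong, the elapsed time is one round-trip sample.
    Long onReceived(int nonce) {
        Long sent = sentAt.remove(nonce);
        return sent == null ? null : System.currentTimeMillis() - sent;
    }
}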


@@ -49,7 +49,6 @@ import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@@ -78,7 +77,7 @@ public class P2PSeedNodeSnapshot extends P2PSeedNodeSnapshotBase {
/**
* Use a counter to do statistics.
*/
private class MyStatistics extends Statistics<Set<Integer>> {
private static class MyStatistics extends Statistics<Set<Integer>> {
@Override
public synchronized void log(Object message) {
@@ -141,8 +140,8 @@ public class P2PSeedNodeSnapshot extends P2PSeedNodeSnapshotBase {
try {
report.put(OnionParser.prettyPrint(host) + ".relativeNumberOfMessages." + messageType,
String.valueOf(set.size() - referenceValues.get(messageType).size()));
} catch (MalformedURLException | NullPointerException ignore) {
log.error("we should never have gotten here", ignore);
} catch (MalformedURLException | NullPointerException e) {
log.error("we should never have gotten here", e);
}
});
try {
@@ -175,8 +174,12 @@ public class P2PSeedNodeSnapshot extends P2PSeedNodeSnapshotBase {
// - process dao data
perType.forEach((type, nodeAddressTupleMap) -> {
// - find head
int head = (int) nodeAddressTupleMap.values().stream().max(Comparator.comparingLong(Tuple::getHeight)).get().height;
int oldest = (int) nodeAddressTupleMap.values().stream().min(Comparator.comparingLong(Tuple::getHeight)).get().height;
int head = nodeAddressTupleMap.values().stream().max(Comparator.comparingLong(Tuple::getHeight))
.map(value -> (int) value.height)
.orElse(0);
int oldest = nodeAddressTupleMap.values().stream().min(Comparator.comparingLong(Tuple::getHeight))
.map(value -> (int) value.height)
.orElse(0);
// - update queried height
if (type.contains("DaoState"))
@@ -199,10 +202,14 @@ public class P2PSeedNodeSnapshot extends P2PSeedNodeSnapshotBase {
hitcount.put(hash, 1);
});
List<ByteBuffer> states = hitcount.entrySet().stream().sorted((o1, o2) -> o2.getValue().compareTo(o1.getValue())).map(byteBufferIntegerEntry -> byteBufferIntegerEntry.getKey()).collect(Collectors.toList());
hitcount.clear();
nodeAddressTupleMap.forEach((nodeAddress, tuple) -> daoreport.put(type + "." + OnionParser.prettyPrint(nodeAddress) + ".hash", Integer.toString(Arrays.asList(states.toArray()).indexOf(ByteBuffer.wrap(tuple.hash)))));
nodeAddressTupleMap.forEach((nodeAddress, tuple) ->
daoreport.put(type + "." + OnionParser.prettyPrint(nodeAddress) + ".hash",
Integer.toString(Arrays.asList(hitcount.entrySet().stream()
.sorted((o1, o2) -> o2.getValue().compareTo(o1.getValue()))
.map(Map.Entry::getKey).toArray()).indexOf(ByteBuffer
.wrap(tuple.hash)))));
// - report reference head
daoreport.put(type + ".referenceHead", Integer.toString(head));
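The reworked block above ranks every observed DAO state hash by how many nodes reported it and then records, per node, the index of that node's hash in the ranking (0 being the most common hash). A simplified consolidation of that ranking step:

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class HashRankSketch {
    // Hashes ordered from most to least frequently reported.
    static List<ByteBuffer> rank(Map<ByteBuffer, Integer> hitcount) {
        return hitcount.entrySet().stream()
                .sorted((o1, o2) -> o2.getValue().compareTo(o1.getValue()))
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
    }

    // A node agreeing with the majority gets 0; disagreeing nodes get higher indices.
    static int rankOf(List<ByteBuffer> ranked, byte[] hash) {
        return ranked.indexOf(ByteBuffer.wrap(hash));
    }
}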
@@ -214,7 +221,7 @@ public class P2PSeedNodeSnapshot extends P2PSeedNodeSnapshotBase {
reporter.report(daoreport, "DaoStateSnapshot");
}
private class Tuple {
private static class Tuple {
@Getter
private final long height;
private final byte[] hash;
@@ -239,7 +246,7 @@ public class P2PSeedNodeSnapshot extends P2PSeedNodeSnapshotBase {
}
}
private Map<NodeAddress, Statistics<Tuple>> daoData = new ConcurrentHashMap<>();
private final Map<NodeAddress, Statistics<Tuple>> daoData = new ConcurrentHashMap<>();
protected boolean treatMessage(NetworkEnvelope networkEnvelope, Connection connection) {
checkNotNull(connection.getPeersNodeAddressProperty(),


@@ -117,10 +117,13 @@ public abstract class P2PSeedNodeSnapshotBase extends Metric implements MessageL
try {
CorePersistenceProtoResolver persistenceProtoResolver = new CorePersistenceProtoResolver(null, null);
//TODO will not work with historical data... should be refactored to re-use code for reading resource files
TradeStatistics3Store tradeStatistics3Store = new TradeStatistics3Store();
PersistenceManager<TradeStatistics3Store> tradeStatistics3PersistenceManager = new PersistenceManager<>(dir,
persistenceProtoResolver, null);
tradeStatistics3PersistenceManager.initialize(tradeStatistics3Store, PersistenceManager.Source.NETWORK);
tradeStatistics3PersistenceManager.initialize(tradeStatistics3Store,
tradeStatistics3Store.getDefaultStorageFileName() + networkPostfix,
PersistenceManager.Source.NETWORK);
TradeStatistics3Store persistedTradeStatistics3Store = tradeStatistics3PersistenceManager.getPersisted();
if (persistedTradeStatistics3Store != null) {
tradeStatistics3Store.getMap().putAll(persistedTradeStatistics3Store.getMap());
@@ -131,7 +134,9 @@ public abstract class P2PSeedNodeSnapshotBase extends Metric implements MessageL
AccountAgeWitnessStore accountAgeWitnessStore = new AccountAgeWitnessStore();
PersistenceManager<AccountAgeWitnessStore> accountAgeWitnessPersistenceManager = new PersistenceManager<>(dir,
persistenceProtoResolver, null);
accountAgeWitnessPersistenceManager.initialize(accountAgeWitnessStore, PersistenceManager.Source.NETWORK);
accountAgeWitnessPersistenceManager.initialize(accountAgeWitnessStore,
accountAgeWitnessStore.getDefaultStorageFileName() + networkPostfix,
PersistenceManager.Source.NETWORK);
AccountAgeWitnessStore persistedAccountAgeWitnessStore = accountAgeWitnessPersistenceManager.getPersisted();
if (persistedAccountAgeWitnessStore != null) {
accountAgeWitnessStore.getMap().putAll(persistedAccountAgeWitnessStore.getMap());
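Put together, the added lines give each store an explicit storage file name (its default name plus the network postfix) before the persisted copy is read back. The trade-statistics read path from the hunk, consolidated into one piece; imports and the surrounding method, including dir, persistenceProtoResolver and networkPostfix, are elided here:

TradeStatistics3Store tradeStatistics3Store = new TradeStatistics3Store();
PersistenceManager<TradeStatistics3Store> persistenceManager =
        new PersistenceManager<>(dir, persistenceProtoResolver, null);
// networkPostfix presumably keeps storage files apart per base-currency network.
persistenceManager.initialize(tradeStatistics3Store,
        tradeStatistics3Store.getDefaultStorageFileName() + networkPostfix,
        PersistenceManager.Source.NETWORK);
TradeStatistics3Store persisted = persistenceManager.getPersisted();
if (persisted != null) {
    tradeStatistics3Store.getMap().putAll(persisted.getMap());
}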


@@ -80,7 +80,7 @@ public class GraphiteReporter extends Reporter {
String report = "bisq" + (Version.getBaseCurrencyNetwork() != 0 ? "-" + BaseCurrencyNetwork.values()[Version.getBaseCurrencyNetwork()].getNetwork() : "")
+ (prefix.isEmpty() ? "" : "." + prefix)
+ (key.isEmpty() ? "" : "." + key)
+ " " + value + " " + Long.valueOf(timeInMilliseconds) / 1000 + "\n";
+ " " + value + " " + Long.parseLong(timeInMilliseconds) / 1000 + "\n";
try {
NodeAddress nodeAddress = OnionParser.getNodeAddress(configuration.getProperty("serviceUrl"));
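The report string assembled above follows Graphite's plaintext protocol, one "<metric.path> <value> <epoch-seconds>" line per sample; a small sketch of that composition with example arguments (the base-currency-network suffix from the hunk is dropped for brevity, and the metric names are made up):

class GraphiteLineSketch {
    // Graphite plaintext protocol: "<path> <value> <epoch-seconds>\n" per sample.
    static String line(String prefix, String key, String value, String timeInMilliseconds) {
        return "bisq"
                + (prefix.isEmpty() ? "" : "." + prefix)
                + (key.isEmpty() ? "" : "." + key)
                + " " + value
                + " " + Long.parseLong(timeInMilliseconds) / 1000 + "\n";
    }

    public static void main(String[] args) {
        // prints "bisq.MarketStats.btc_eur.volume 42 1603822559"
        System.out.print(line("MarketStats", "btc_eur.volume", "42", "1603822559000"));
    }
}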