Mirror of https://github.com/bisq-network/bisq.git (synced 2024-11-19 01:41:11 +01:00)
Implement BM capping algorithm change (with delayed activation)
Change the algorithm used to adjust & cap the burn share of each BM candidate to use an unlimited number of 'rounds', as described in: https://github.com/bisq-network/proposals/issues/412

That is, instead of capping the shares once, then distributing the excess to the remaining BM, then capping again and giving any excess to the Legacy Burning Man, we cap-redistribute-cap-redistribute-... an unlimited number of times until no more candidates are capped. This has the effect of reducing the LBM's share and increasing everyone else's, alleviating the security risk of giving too much to the LBM (who is necessarily uncapped).

Instead of implementing the new algorithm directly, we simply enlarge the set of candidates who should be capped to include those who would eventually be capped by the new algorithm, in order to determine how much excess burn share should go to the remaining BM. Then we apply the original method, 'candidate.calculateCappedAndAdjustedShares(..)', to set each share to be equal to its respective cap or uniformly scaled upwards from the starting amount accordingly.

To this end, the static method 'BurningManService.imposeCaps' is added, which determines which candidates will eventually be capped, by sorting them in descending order of burn-share/cap-share ratio, then marking all the candidates in some suitable prefix of the list as capped, iterating through them one-by-one & gradually increasing the virtual capping round (starting at zero) until the end of the prefix is reached. (The method also determines what the uncapped adjusted burn share of each BM should be, but that only affects the BM view & burn targets.) In this way, the new algorithm runs in guaranteed O(n * log n) time.

To prevent failed trades, the new algorithm is set to activate at time 'DelayedPayoutTxReceiverService.PROPOSAL_412_ACTIVATION_DATE', with a placeholder value of 12am, 1st January 2024 (UTC). This simply toggles whether the for-loop in 'imposeCaps' should stop after capping round 0, since doing so will lead to identical behaviour to the original code (even accounting for FP rounding errors).
parent 7dfd6aa5e1
commit 4c0c11bb27
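
For orientation, the cap-redistribute-cap-redistribute process described above can be sketched as a naive loop. This is an illustrative model only, not the committed code: the Candidate class and its share/cap fields below are hypothetical stand-ins for BurningManCandidate, and the committed imposeCaps avoids looping like this by pre-computing, in a single sorted pass, which candidates would end up capped.

import java.util.List;

// Naive model of the unlimited-rounds capping: each pass, fix over-cap candidates at
// their caps and redistribute the freed-up share pro rata among the still-uncapped
// candidates, repeating until no new candidate gets capped. Any excess left once every
// candidate is capped is what would fall through to the Legacy Burning Man.
final class CappingSketch {
    static final class Candidate {          // hypothetical stand-in, not BurningManCandidate
        double share;                       // current (adjusted) burn share
        final double cap;                   // maximum allowed share
        boolean capped;

        Candidate(double share, double cap) {
            this.share = share;
            this.cap = cap;
        }
    }

    static void capAndRedistribute(List<Candidate> candidates) {
        boolean newlyCapped = true;
        while (newlyCapped) {
            newlyCapped = false;
            double excess = 0.0;
            double uncappedTotal = 0.0;
            for (Candidate c : candidates) {
                if (!c.capped && c.share >= c.cap) {
                    excess += c.share - c.cap;      // share freed up by capping this candidate
                    c.share = c.cap;
                    c.capped = true;
                    newlyCapped = true;
                } else if (!c.capped) {
                    uncappedTotal += c.share;
                }
            }
            if (newlyCapped && uncappedTotal > 0.0) {
                // Scale the uncapped shares up uniformly so that they absorb the excess.
                double scale = (uncappedTotal + excess) / uncappedTotal;
                for (Candidate c : candidates) {
                    if (!c.capped) {
                        c.share *= scale;
                    }
                }
            }
        }
    }
}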
@@ -42,9 +42,12 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.ImmutableList;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
@@ -112,6 +115,10 @@ public class BurningManService {
     ///////////////////////////////////////////////////////////////////////////////////////////
 
     Map<String, BurningManCandidate> getBurningManCandidatesByName(int chainHeight) {
+        return getBurningManCandidatesByName(chainHeight, !DelayedPayoutTxReceiverService.isProposal412Activated());
+    }
+
+    Map<String, BurningManCandidate> getBurningManCandidatesByName(int chainHeight, boolean limitCappingRounds) {
         Map<String, BurningManCandidate> burningManCandidatesByName = new TreeMap<>();
         Map<P2PDataStorage.ByteArray, Set<TxOutput>> proofOfBurnOpReturnTxOutputByHash = getProofOfBurnOpReturnTxOutputByHash(chainHeight);
 
@@ -187,25 +194,58 @@ public class BurningManService {
                 .sum();
         burningManCandidates.forEach(candidate -> candidate.calculateShares(totalDecayedCompensationAmounts, totalDecayedBurnAmounts));
 
+        int numRoundsWithCapsApplied = imposeCaps(burningManCandidates, limitCappingRounds);
+
         double sumAllCappedBurnAmountShares = burningManCandidates.stream()
-                .filter(candidate -> candidate.getBurnAmountShare() >= candidate.getMaxBoostedCompensationShare())
+                .filter(candidate -> candidate.getRoundCapped().isPresent())
                .mapToDouble(BurningManCandidate::getMaxBoostedCompensationShare)
                 .sum();
         double sumAllNonCappedBurnAmountShares = burningManCandidates.stream()
-                .filter(candidate -> candidate.getBurnAmountShare() < candidate.getMaxBoostedCompensationShare())
+                .filter(candidate -> candidate.getRoundCapped().isEmpty())
                 .mapToDouble(BurningManCandidate::getBurnAmountShare)
                 .sum();
-        burningManCandidates.forEach(candidate -> candidate.calculateCappedAndAdjustedShares(sumAllCappedBurnAmountShares, sumAllNonCappedBurnAmountShares));
+        burningManCandidates.forEach(candidate -> candidate.calculateCappedAndAdjustedShares(
+                sumAllCappedBurnAmountShares, sumAllNonCappedBurnAmountShares, numRoundsWithCapsApplied));
 
         return burningManCandidatesByName;
     }
 
+    private static int imposeCaps(Collection<BurningManCandidate> burningManCandidates, boolean limitCappingRounds) {
+        List<BurningManCandidate> candidatesInDescendingBurnCapRatio = new ArrayList<>(burningManCandidates);
+        candidatesInDescendingBurnCapRatio.sort(Comparator.comparing(BurningManCandidate::getBurnCapRatio).reversed());
+        double thresholdBurnCapRatio = 1.0;
+        double remainingBurnShare = 1.0;
+        double remainingCapShare = 1.0;
+        int cappingRound = 0;
+        for (BurningManCandidate candidate : candidatesInDescendingBurnCapRatio) {
+            double invScaleFactor = remainingBurnShare / remainingCapShare;
+            double burnCapRatio = candidate.getBurnCapRatio();
+            if (remainingCapShare <= 0.0 || burnCapRatio <= 0.0 || burnCapRatio < invScaleFactor ||
+                    limitCappingRounds && burnCapRatio < 1.0) {
+                cappingRound++;
+                break;
+            }
+            if (burnCapRatio < thresholdBurnCapRatio) {
+                thresholdBurnCapRatio = invScaleFactor;
+                cappingRound++;
+            }
+            candidate.imposeCap(cappingRound, candidate.getBurnAmountShare() / thresholdBurnCapRatio);
+            remainingBurnShare -= candidate.getBurnAmountShare();
+            remainingCapShare -= candidate.getMaxBoostedCompensationShare();
+        }
+        return cappingRound;
+    }
+
     String getLegacyBurningManAddress(int chainHeight) {
         return daoStateService.getParamValue(Param.RECIPIENT_BTC_ADDRESS, chainHeight);
     }
 
     Set<BurningManCandidate> getActiveBurningManCandidates(int chainHeight) {
-        return getBurningManCandidatesByName(chainHeight).values().stream()
+        return getActiveBurningManCandidates(chainHeight, !DelayedPayoutTxReceiverService.isProposal412Activated());
+    }
+
+    Set<BurningManCandidate> getActiveBurningManCandidates(int chainHeight, boolean limitCappingRounds) {
+        return getBurningManCandidatesByName(chainHeight, limitCappingRounds).values().stream()
                 .filter(burningManCandidate -> burningManCandidate.getCappedBurnAmountShare() > 0)
                 .filter(candidate -> candidate.getReceiverAddress().isPresent())
                 .collect(Collectors.toSet());
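As a worked illustration of the imposeCaps loop above (not part of the commit; the inputs come from the 'threeCappingRoundsNeeded' test added below, and the intermediate quotients are derived by hand, so treat the figures as approximate): ten candidates have burn shares of 0.125 (alice0-5) or 0.0625 (alice6-9) and caps of 0.11 (alice0-7) or 0.07 (alice8-9). Sorted by descending burn-share/cap-share ratio, alice0-5 (≈1.14) come first, then alice8-9 (≈0.89), then alice6-7 (≈0.57). The first six have ratios of at least 1.0, so they are capped in round 0 and the threshold stays at 1.0. When alice8 is reached its ratio falls below the threshold, so the round advances to 1, the threshold becomes the remaining burn-share/cap-share quotient (0.25/0.34 ≈ 0.74), and alice8 & alice9 are capped in round 1. When alice6 is reached its ratio (≈0.57) falls below the then-remaining quotient (0.125/0.20 ≈ 0.63), so the loop increments the round once more and breaks, returning 2. The capped candidates then receive exactly their caps (6 x 0.11 + 2 x 0.07 = 0.80), and calculateCappedAndAdjustedShares scales the 0.125 of burn share held by alice6 & alice7 up to fill the remaining 0.20, giving each of them ≈0.10 and leaving nothing for the Legacy Burning Man, consistent with the burnShareTotal of 1.0 asserted in that test for the unlimited-rounds case.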
@@ -56,11 +56,18 @@ public class DelayedPayoutTxReceiverService implements DaoStateListener {
     // requests change address.
     // See: https://github.com/bisq-network/bisq/issues/6699
     public static final Date BUGFIX_6699_ACTIVATION_DATE = Utilities.getUTCDate(2023, GregorianCalendar.JULY, 24);
+    // See: https://github.com/bisq-network/proposals/issues/412
+    public static final Date PROPOSAL_412_ACTIVATION_DATE = Utilities.getUTCDate(2024, GregorianCalendar.JANUARY, 1);
 
     public static boolean isBugfix6699Activated() {
         return new Date().after(BUGFIX_6699_ACTIVATION_DATE);
     }
 
+    @SuppressWarnings("BooleanMethodIsAlwaysInverted")
+    public static boolean isProposal412Activated() {
+        return new Date().after(PROPOSAL_412_ACTIVATION_DATE);
+    }
+
     // We don't allow to get further back than 767950 (the block height from Dec. 18th 2022).
     static final int MIN_SNAPSHOT_HEIGHT = Config.baseCurrencyNetwork().isRegtest() ? 0 : 767950;
 
@@ -28,6 +28,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Optional;
+import java.util.OptionalInt;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -68,6 +69,7 @@ public class BurningManCandidate {
     // The burnAmountShare adjusted in case there are cappedBurnAmountShare.
     // We redistribute the over-burned amounts to the group of not capped candidates.
     protected double adjustedBurnAmountShare;
+    private OptionalInt roundCapped = OptionalInt.empty();
 
     public BurningManCandidate() {
     }
@@ -142,11 +144,19 @@ public class BurningManCandidate {
         burnAmountShare = totalDecayedBurnAmounts > 0 ? accumulatedDecayedBurnAmount / totalDecayedBurnAmounts : 0;
     }
 
+    public void imposeCap(int cappingRound, double adjustedBurnAmountShare) {
+        roundCapped = OptionalInt.of(cappingRound);
+        // NOTE: The adjusted burn share set here will not affect the final capped burn share, only
+        // the presentation service, so we need not worry about rounding errors affecting consensus.
+        this.adjustedBurnAmountShare = adjustedBurnAmountShare;
+    }
+
     public void calculateCappedAndAdjustedShares(double sumAllCappedBurnAmountShares,
-                                                 double sumAllNonCappedBurnAmountShares) {
+                                                 double sumAllNonCappedBurnAmountShares,
+                                                 int numAppliedCappingRounds) {
         double maxBoostedCompensationShare = getMaxBoostedCompensationShare();
-        adjustedBurnAmountShare = burnAmountShare;
-        if (burnAmountShare < maxBoostedCompensationShare) {
+        if (roundCapped.isEmpty()) {
+            adjustedBurnAmountShare = burnAmountShare;
             if (sumAllCappedBurnAmountShares == 0) {
                 // If no one is capped we do not need to do any adjustment
                 cappedBurnAmountShare = burnAmountShare;
@@ -165,7 +175,11 @@ public class BurningManCandidate {
                 } else {
                     // We exceeded the cap by the adjustment. This will lead to the legacy BM getting the
                     // difference of the adjusted amount and the maxBoostedCompensationShare.
+                    // NOTE: When the number of capping rounds are unlimited (that is post- Proposal 412
+                    // activation), we should only get to this branch as a result of floating point rounding
+                    // errors. In that case, the extra amount the LBM gets is negligible.
                     cappedBurnAmountShare = maxBoostedCompensationShare;
+                    roundCapped = OptionalInt.of(roundCapped.orElse(numAppliedCappingRounds));
                 }
             }
         }
@@ -174,6 +188,12 @@ public class BurningManCandidate {
         }
     }
 
+    public double getBurnCapRatio() {
+        // NOTE: This is less than 1.0 precisely when burnAmountShare < maxBoostedCompensationShare,
+        // in spite of any floating point rounding errors, since 1.0 is proportionately at least as
+        // close to the previous double as any two consecutive nonzero doubles on the number line.
+        return burnAmountShare > 0.0 ? burnAmountShare / getMaxBoostedCompensationShare() : 0.0;
+    }
+
     public double getMaxBoostedCompensationShare() {
         return Math.min(BurningManService.MAX_BURN_SHARE, compensationShare * BurningManService.ISSUANCE_BOOST_FACTOR);
@@ -194,6 +214,7 @@ public class BurningManCandidate {
                 ",\r\n burnAmountShare=" + burnAmountShare +
                 ",\r\n cappedBurnAmountShare=" + cappedBurnAmountShare +
                 ",\r\n adjustedBurnAmountShare=" + adjustedBurnAmountShare +
+                ",\r\n roundCapped=" + roundCapped +
                 "\r\n}";
     }
 }
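The floating-point claim in the getBurnCapRatio comment above can be exercised in isolation with a small throwaway check (not part of the commit; the sample cap values are arbitrary):

// Illustrative boundary check: for positive doubles, a / b < 1.0 holds exactly when a < b,
// because the spacing of doubles just below 1.0 is relatively no wider than the spacing
// between any two adjacent nonzero doubles, so the rounded quotient cannot cross 1.0.
public class BurnCapRatioBoundaryCheck {
    public static void main(String[] args) {
        double[] caps = {0.11, 0.07, 1e-9, 12345.678};
        for (double cap : caps) {
            double justBelow = Math.nextDown(cap);   // largest double strictly below cap
            double justAbove = Math.nextUp(cap);     // smallest double strictly above cap
            System.out.printf("cap=%s: below/cap<1 -> %b, cap/cap==1 -> %b, above/cap>=1 -> %b%n",
                    cap, justBelow / cap < 1.0, cap / cap == 1.0, justAbove / cap >= 1.0);
        }
    }
}

Every printed flag should be true; per the NOTE above, this is what lets the burnCapRatio < 1.0 comparison in imposeCaps agree exactly with the original burnAmountShare < maxBoostedCompensationShare check.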
@@ -48,9 +48,15 @@ public final class LegacyBurningMan extends BurningManCandidate {
         // do nothing
     }
 
+    @Override
+    public void imposeCap(int cappingRound, double adjustedBurnAmountShare) {
+        // do nothing
+    }
+
     @Override
     public void calculateCappedAndAdjustedShares(double sumAllCappedBurnAmountShares,
-                                                 double sumAllNonCappedBurnAmountShares) {
+                                                 double sumAllNonCappedBurnAmountShares,
+                                                 int numAppliedCappingRounds) {
         // do nothing
     }
 
@@ -58,6 +58,8 @@ import org.mockito.stubbing.Answer;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Nested;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.jupiter.api.Assertions.assertAll;
@@ -128,8 +130,9 @@ public class BurningManServiceTest {
         addCompensationIssuanceAndPayloads(Arrays.asList(tuples));
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_inactiveAndExpiredCandidates() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_inactiveAndExpiredCandidates(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(
                 compensationIssuanceAndPayload("alice", "0000", 760000, 10000),
                 compensationIssuanceAndPayload("bob", "0001", 690000, 20000), // expired
@@ -141,7 +144,7 @@ public class BurningManServiceTest {
                 proofOfBurnTx("bob", "1001", 790000, 300000),
                 proofOfBurnTx("carol", "1002", 740000, 300000) // expired
         );
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         assertEquals(0.11, candidateMap.get("alice").getMaxBoostedCompensationShare());
         assertEquals(0.0, candidateMap.get("bob").getMaxBoostedCompensationShare());
@@ -162,10 +165,16 @@ public class BurningManServiceTest {
         assertEquals(0.0, candidateMap.get("bob").getCappedBurnAmountShare());
         assertEquals(0.0, candidateMap.get("carol").getCappedBurnAmountShare());
         assertEquals(0.0, candidateMap.get("dave").getCappedBurnAmountShare());
+
+        assertEquals(0, candidateMap.get("alice").getRoundCapped().orElse(-1));
+        assertEquals(0, candidateMap.get("bob").getRoundCapped().orElse(-1));
+        assertEquals(-1, candidateMap.get("carol").getRoundCapped().orElse(-1));
+        assertEquals(-1, candidateMap.get("dave").getRoundCapped().orElse(-1));
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allCapped_oneCappingRoundNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allCapped_oneCappingRoundNeeded(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(
                 compensationIssuanceAndPayload("alice", "0000", 760000, 10000),
                 compensationIssuanceAndPayload("bob", "0001", 770000, 20000)
@@ -174,7 +183,7 @@ public class BurningManServiceTest {
                 proofOfBurnTx("alice", "1000", 780000, 400000),
                 proofOfBurnTx("bob", "1001", 790000, 300000)
         );
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         assertEquals(0.5, candidateMap.get("alice").getBurnAmountShare());
         assertEquals(0.5, candidateMap.get("bob").getBurnAmountShare());
@@ -184,10 +193,14 @@ public class BurningManServiceTest {
 
         assertEquals(0.11, candidateMap.get("alice").getCappedBurnAmountShare());
         assertEquals(0.11, candidateMap.get("bob").getCappedBurnAmountShare());
+
+        assertEquals(0, candidateMap.get("alice").getRoundCapped().orElse(-1));
+        assertEquals(0, candidateMap.get("bob").getRoundCapped().orElse(-1));
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_noneCapped_oneCappingRoundNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_noneCapped_oneCappingRoundNeeded(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, 100000)
         ).collect(Collectors.toList()));
@@ -196,7 +209,7 @@ public class BurningManServiceTest {
                 proofOfBurnTx("alice" + i, "100" + i, 760000, 400000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         assertAll(IntStream.range(0, 10).mapToObj(i -> () -> {
             var candidate = candidateMap.get("alice" + i);
@@ -204,11 +217,13 @@ public class BurningManServiceTest {
             assertEquals(0.1, candidate.getBurnAmountShare());
             assertEquals(0.1, candidate.getAdjustedBurnAmountShare(), 1e-10);
            assertEquals(0.1, candidate.getCappedBurnAmountShare());
+            assertEquals(-1, candidate.getRoundCapped().orElse(-1));
         }));
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_twoCappingRoundsNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_twoCappingRoundsNeeded(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, 100000)
         ).collect(Collectors.toList()));
@@ -217,7 +232,7 @@ public class BurningManServiceTest {
                 proofOfBurnTx("alice" + i, "100" + i, 760000, i < 6 ? 400000 : 200000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         // Note the expected rounding error below. To prevent DPT verification failures, the
         // capping algorithm output must be well defined to the nearest floating point ULP.
@@ -227,6 +242,7 @@ public class BurningManServiceTest {
             assertEquals(i < 6 ? 0.125 : 0.0625, candidate.getBurnAmountShare());
             assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
             assertEquals(i < 6 ? 0.11 : 0.08499999999999999, candidate.getCappedBurnAmountShare());
+            assertEquals(i < 6 ? 0 : -1, candidate.getRoundCapped().orElse(-1));
         }));
         // Only two capping rounds were required to achieve a burn share total of 100%, so
         // nothing goes to the LBM in this case.
@@ -234,8 +250,9 @@ public class BurningManServiceTest {
         assertEquals(1.0, burnShareTotal);
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_threeCappingRoundsNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToMoreThanUnity_someCapped_threeCappingRoundsNeeded(boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000, i < 8 ? 123250 : 7000)
         ).collect(Collectors.toList()));
@@ -244,29 +261,38 @@ public class BurningManServiceTest {
                 proofOfBurnTx("alice" + i, "100" + i, 760000, i < 6 ? 400000 : 200000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
-        // Note the expected rounding error below. To prevent DPT verification failures, the
+        // Note the expected rounding errors below. To prevent DPT verification failures, the
        // capping algorithm output must be well defined to the nearest floating point ULP.
         assertAll(IntStream.range(0, 10).mapToObj(i -> () -> {
             var candidate = candidateMap.get("alice" + i);
             assertEquals(i < 8 ? 0.11 : 0.07, candidate.getMaxBoostedCompensationShare());
             assertEquals(i < 6 ? 0.125 : 0.0625, candidate.getBurnAmountShare());
-            assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
-            assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+            if (limitCappingRounds) {
+                assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+            } else {
+                assertEquals(i < 6 ? 0.125 : i < 8 ? 0.1 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(i < 6 ? 0.11 : i < 8 ? 0.09999999999999998 : 0.07, candidate.getCappedBurnAmountShare());
            }
+            assertEquals(i < 6 ? 0 : i < 8 ? -1 : 1, candidate.getRoundCapped().orElse(-1));
         }));
-        // Three capping rounds would have been required to achieve a burn share total of
-        // 100%, but our capping algorithm only applies two, so 3% ends up going to the LBM
-        // in this case, instead of being distributed between `alice6` & `alice7`. The caps
-        // sum to more than 100%, however, so we could have avoided giving him any.
+        // Three capping rounds are required to achieve a burn share total of 100%, but our
+        // algorithm only applies two when `limitCappingRounds` is true (that is, prior to
+        // the activation of the capping algorithm change), so 3% ends up going to the LBM in
+        // that case, instead of being distributed between `alice6` & `alice7`. The caps sum
+        // to more than 100%, however, so we could have avoided giving him any.
         double capTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getMaxBoostedCompensationShare).sum();
         double burnShareTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getCappedBurnAmountShare).sum();
         assertEquals(1.02, capTotal);
-        assertEquals(0.97, burnShareTotal);
+        assertEquals(limitCappingRounds ? 0.97 : 1.0, burnShareTotal);
     }
 
-    @Test
-    public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allShouldBeCapped_fourCappingRoundsNeeded() {
+    @ValueSource(booleans = {true, false})
+    @ParameterizedTest(name = "[{index}] limitCappingRounds={0}")
+    public void testGetBurningManCandidatesByName_capsSumToLessThanUnity_allShouldBeCapped_fourCappingRoundsNeeded(
            boolean limitCappingRounds) {
         addCompensationIssuanceAndPayloads(IntStream.range(0, 10).mapToObj(i ->
                 compensationIssuanceAndPayload("alice" + i, "000" + i, 710000,
                        i < 6 ? 483200 : i == 6 ? 31800 : i == 7 ? 27000 : 21000)
@@ -276,7 +302,7 @@ public class BurningManServiceTest {
                 proofOfBurnTx("alice" + i, "100" + i, 760000, i < 6 ? 400000 : 200000)
         ).toArray(Tx[]::new));
 
-        var candidateMap = burningManService.getBurningManCandidatesByName(800000);
+        var candidateMap = burningManService.getBurningManCandidatesByName(800000, limitCappingRounds);
 
         // Note the expected rounding error below. To prevent DPT verification failures, the
         // capping algorithm output must be well defined to the nearest floating point ULP.
@@ -284,18 +310,26 @@ public class BurningManServiceTest {
             var candidate = candidateMap.get("alice" + i);
             assertEquals(i < 6 ? 0.11 : i == 6 ? 0.106 : i == 7 ? 0.09 : 0.07, candidate.getMaxBoostedCompensationShare());
             assertEquals(i < 6 ? 0.125 : 0.0625, candidate.getBurnAmountShare());
-            assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
-            assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+            if (limitCappingRounds) {
+                assertEquals(i < 6 ? 0.125 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(i < 6 ? 0.11 : i < 8 ? 0.08499999999999999 : 0.07, candidate.getCappedBurnAmountShare());
+                assertEquals(i < 6 ? 0 : i < 8 ? -1 : 1, candidate.getRoundCapped().orElse(-1));
+            } else {
+                assertEquals(i < 6 ? 0.125 : i == 6 ? 0.11 : i == 7 ? 0.1 : 0.085, candidate.getAdjustedBurnAmountShare(), 1e-10);
+                assertEquals(candidate.getMaxBoostedCompensationShare(), candidate.getCappedBurnAmountShare());
+                assertEquals(i < 6 ? 0 : i == 6 ? 3 : i == 7 ? 2 : 1, candidate.getRoundCapped().orElse(-1));
+            }
         }));
-        // Four capping rounds would have been required to achieve a maximum possible burn
-        // share total of 99.6%, with all the contributors being capped. But our capping
-        // algorithm only applies two rounds, so 3% ends up going to the LBM instead of the
-        // minimum possible amount of 0.4% (100% less the cap sum). Contributors `alice6` &
-        // `alice7` therefore receive less than they could have done.
+        // Four capping rounds are required to achieve a maximum possible burn share total of
+        // 99.6%, with all the contributors being capped. But our algorithm only applies two
+        // rounds when `limitCappingRounds` is true (that is, prior to the activation of the
+        // capping algorithm change), so 3% ends up going to the LBM in that case, instead of
+        // the minimum possible amount of 0.4% (100% less the cap sum). Contributors `alice6`
+        // & `alice7` therefore receive less than they could have done.
         double capTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getMaxBoostedCompensationShare).sum();
         double burnShareTotal = candidateMap.values().stream().mapToDouble(BurningManCandidate::getCappedBurnAmountShare).sum();
         assertEquals(0.996, capTotal);
-        assertEquals(0.97, burnShareTotal);
+        assertEquals(limitCappingRounds ? 0.97 : capTotal, burnShareTotal);
     }
 }