BloomFilter: use signed integer for the nonce (nTweak)

It's arbitrary data and we're not supposed to do math on it, so it's
safe to use a signed Java `int`.

Also use `new Random().nextInt()` to generate the nonce. Previously we
generated a random double, multiplied it by `Long.MAX_VALUE`, cast the
result to a `long`, and then truncated it to its low 32 bits. Using
`.nextInt()` may actually give us more randomness, since nothing gets truncated.
Authored by Sean Gilligan on 2023-03-24 09:24:58 -07:00; committed by Andreas Schildbach
parent b39ad1904d
commit f3c4b5fcf4
13 changed files with 64 additions and 24 deletions
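
For reference, a minimal sketch contrasting the old and new nonce generation; the wrapper class and print statement are illustrative only, while the two expressions come straight from the diff below:

    import java.util.Random;

    public class NonceSketch {
        public static void main(String[] args) {
            // Old: Math.random() yields at most 53 bits of entropy, and the
            // resulting long was later truncated to its low 32 bits.
            long oldNonce = (long) (Math.random() * Long.MAX_VALUE);

            // New: a uniformly distributed signed 32-bit value, nothing truncated.
            int newNonce = new Random().nextInt();

            System.out.println(oldNonce + " -> " + newNonce);
        }
    }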


@@ -153,6 +153,17 @@ public class ByteUtils {
writeUint16BE(val, ByteBuffer.wrap(out, offset, out.length - offset));
}
+/**
+* Write 4 bytes to the buffer as signed 32-bit integer in little endian format.
+* @param val value to be written
+* @param buf buffer to be written into
+* @return the buffer
+* @throws BufferOverflowException if the value doesn't fit the remaining buffer
+*/
+public static ByteBuffer writeInt32LE(int val, ByteBuffer buf) throws BufferOverflowException {
+return buf.order(ByteOrder.LITTLE_ENDIAN).putInt(val);
+}
/**
* Write 4 bytes to the buffer as unsigned 32-bit integer in little endian format.
* @param val value to be written
@@ -252,6 +263,18 @@ public class ByteUtils {
stream.write(buf);
}
+/**
+* Write 4 bytes to the output stream as signed 32-bit integer in little endian format.
+* @param val value to be written
+* @param stream stream to be written into
+* @throws IOException if an I/O error occurs
+*/
+public static void writeInt32LE(int val, OutputStream stream) throws IOException {
+byte[] buf = new byte[4];
+writeInt32LE(val, ByteBuffer.wrap(buf));
+stream.write(buf);
+}
/**
* Write 4 bytes to the output stream as unsigned 32-bit integer in little endian format.
* @param val value to be written
@@ -360,6 +383,16 @@ public class ByteUtils {
return Integer.toUnsignedLong(buf.order(ByteOrder.LITTLE_ENDIAN).getInt());
}
+/**
+* Read 4 bytes from the buffer as signed 32-bit integer in little endian format.
+* @param buf buffer to be read from
+* @return read integer
+* @throws BufferUnderflowException if the read value extends beyond the remaining bytes of the buffer
+*/
+public static int readInt32(ByteBuffer buf) throws BufferUnderflowException {
+return buf.order(ByteOrder.LITTLE_ENDIAN).getInt();
+}
/**
* Read 4 bytes from the byte array (starting at the offset) as unsigned 32-bit integer in little endian format.
* @param bytes buffer to be read from

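To illustrate the new helpers, here is a minimal round-trip sketch; it assumes `ByteUtils` is imported from the appropriate bitcoinj package and uses only the signatures added above:

    import java.nio.ByteBuffer;

    public class Int32RoundTripSketch {
        public static void main(String[] args) {
            int original = -42; // negative values survive intact as signed ints

            // Write the value into a 4-byte buffer in little endian order...
            ByteBuffer buf = ByteBuffer.allocate(4);
            ByteUtils.writeInt32LE(original, buf);

            // ...then rewind and read it back as a signed 32-bit integer.
            buf.rewind();
            int restored = ByteUtils.readInt32(buf);

            System.out.println(original == restored); // true
        }
    }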

@@ -66,7 +66,7 @@ public class BloomFilter extends Message {
private byte[] data;
private long hashFuncs;
-private long nTweak;
+private int nTweak;
private byte nFlags;
// Same value as Bitcoin Core
@@ -85,10 +85,10 @@ public class BloomFilter extends Message {
/**
* Constructs a filter with the given parameters which is updated on P2PK outputs only.
*/
-public BloomFilter(int elements, double falsePositiveRate, long randomNonce) {
+public BloomFilter(int elements, double falsePositiveRate, int randomNonce) {
this(elements, falsePositiveRate, randomNonce, BloomUpdate.UPDATE_P2PUBKEY_ONLY);
}
/**
* <p>Constructs a new Bloom Filter which will provide approximately the given false positive rate when the given
* number of elements have been inserted. If the filter would otherwise be larger than the maximum allowed size,
@@ -118,7 +118,7 @@ public class BloomFilter extends Message {
* of this flag is to reduce network round-tripping and avoid over-dirtying the filter for the most common
* wallet configurations.</p>
*/
-public BloomFilter(int elements, double falsePositiveRate, long randomNonce, BloomUpdate updateFlag) {
+public BloomFilter(int elements, double falsePositiveRate, int randomNonce, BloomUpdate updateFlag) {
// The following formulas were stolen from Wikipedia's page on Bloom Filters (with the addition of min(..., MAX_...))
// Size required for a given number of elements and false-positive rate
int size = (int)(-1 / (pow(log(2), 2)) * elements * log(falsePositiveRate));
@@ -155,7 +155,7 @@ public class BloomFilter extends Message {
hashFuncs = readUint32();
if (hashFuncs > MAX_HASH_FUNCS)
throw new ProtocolException("Bloom filter hash function count out of range");
-nTweak = readUint32();
+nTweak = readInt32();
nFlags = readByte();
}
@@ -167,7 +167,7 @@ public class BloomFilter extends Message {
stream.write(VarInt.of(data.length).encode());
stream.write(data);
ByteUtils.writeUint32LE(hashFuncs, stream);
-ByteUtils.writeUint32LE(nTweak, stream);
+ByteUtils.writeInt32LE(nTweak, stream);
stream.write(nFlags);
}
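
A hypothetical caller now passes the nonce as an `int`; the sketch below assumes `BloomFilter` is imported from bitcoinj, and the element count and false-positive rate are arbitrary illustrative values:

    import java.util.Random;

    public class FilterConstructionSketch {
        public static BloomFilter newFilter() {
            int nTweak = new Random().nextInt(); // opaque 32-bit nonce
            return new BloomFilter(100, 0.001, nTweak); // updated on P2PK outputs only
        }
    }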


@@ -147,6 +147,10 @@ public abstract class Message {
return bitcoinSerialize().length;
}
+protected int readInt32() throws BufferUnderflowException {
+return ByteUtils.readInt32(payload);
+}
protected long readUint32() throws BufferUnderflowException {
return ByteUtils.readUint32(payload);
}


@@ -42,7 +42,7 @@ public interface PeerFilterProvider {
/**
* Called on all registered filter providers before {@link #getBloomFilterElementCount()} and
-* {@link #getBloomFilter(int, double, long)} are called. Once called, the provider should ensure that the items
+* {@link #getBloomFilter(int, double, int)} are called. Once called, the provider should ensure that the items
* it will want to insert into the filter don't change. The reason is that all providers will have their element
* counts queried, and then a filter big enough for all of them will be specified. So the provider must use
* consistent state. There is guaranteed to be a matching call to {@link #endBloomFilterCalculation()} that can
@@ -52,7 +52,7 @@ public interface PeerFilterProvider {
/**
* Gets the number of elements that will be added to a bloom filter returned by
-* {@link PeerFilterProvider#getBloomFilter(int, double, long)}
+* {@link PeerFilterProvider#getBloomFilter(int, double, int)}
*/
int getBloomFilterElementCount();
@@ -60,7 +60,7 @@ public interface PeerFilterProvider {
* Gets a bloom filter that contains all the necessary elements for the listener to receive relevant transactions.
* Default value should be an empty bloom filter with the given size, falsePositiveRate, and nTweak.
*/
-BloomFilter getBloomFilter(int size, double falsePositiveRate, long nTweak);
+BloomFilter getBloomFilter(int size, double falsePositiveRate, int nTweak);
/**
* See {@link #beginBloomFilterCalculation()}.


@@ -1420,7 +1420,7 @@ public class PeerGroup implements TransactionBroadcaster {
* <p>Be careful regenerating the bloom filter too often, as it decreases anonymity because remote nodes can
* compare transactions against both the new and old filters to significantly decrease the false positive rate.</p>
*
-* <p>See the docs for {@link BloomFilter#BloomFilter(int, double, long, BloomFilter.BloomUpdate)} for a brief
+* <p>See the docs for {@link BloomFilter#BloomFilter(int, double, int, BloomFilter.BloomUpdate)} for a brief
* explanation of anonymity when using bloom filters.</p>
*/
public void setBloomFilterFalsePositiveRate(double bloomFilterFPRate) {


@@ -26,6 +26,7 @@ import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
+import java.util.Random;
// This code is unit tested by the PeerGroup tests.
@@ -43,7 +44,7 @@ import java.util.List;
*/
public class FilterMerger {
// We use a constant tweak to avoid giving up privacy when we regenerate our filter with new keys
-private final long bloomFilterTweak = (long) (Math.random() * Long.MAX_VALUE);
+private final int bloomFilterTweak = new Random().nextInt();
private volatile double vBloomFilterFPRate;
private int lastBloomFilterElementCount;


@@ -606,7 +606,7 @@ public class BasicKeyChain implements EncryptableKeyChain {
@Override
-public BloomFilter getFilter(int size, double falsePositiveRate, long tweak) {
+public BloomFilter getFilter(int size, double falsePositiveRate, int tweak) {
lock.lock();
try {
BloomFilter filter = new BloomFilter(size, falsePositiveRate, tweak);


@@ -1140,7 +1140,7 @@ public class DeterministicKeyChain implements EncryptableKeyChain {
}
@Override
-public BloomFilter getFilter(int size, double falsePositiveRate, long tweak) {
+public BloomFilter getFilter(int size, double falsePositiveRate, int tweak) {
lock.lock();
try {
checkArgument(size >= numBloomFilterEntries());


@@ -72,7 +72,7 @@ public interface KeyChain {
/**
* Returns the number of elements this chain wishes to insert into the Bloom filter. The size passed to
-* {@link #getFilter(int, double, long)} should be at least this large.
+* {@link #getFilter(int, double, int)} should be at least this large.
*/
int numBloomFilterEntries();
@@ -100,8 +100,8 @@ public interface KeyChain {
* <p>This is used to generate a {@link BloomFilter} which can be {@link BloomFilter#merge(BloomFilter)}d with
* another. It could also be used if you have a specific target for the filter's size.</p>
*
-* <p>See the docs for {@link BloomFilter#BloomFilter(int, double, long)} for a brief
+* <p>See the docs for {@link BloomFilter#BloomFilter(int, double, int)} for a brief
* explanation of anonymity when using bloom filters, and for the meaning of these parameters.</p>
*/
-BloomFilter getFilter(int size, double falsePositiveRate, long tweak);
+BloomFilter getFilter(int size, double falsePositiveRate, int tweak);
}


@@ -889,7 +889,7 @@ public class KeyChainGroup implements KeyBag {
return result;
}
-public BloomFilter getBloomFilter(int size, double falsePositiveRate, long nTweak) {
+public BloomFilter getBloomFilter(int size, double falsePositiveRate, int nTweak) {
BloomFilter filter = new BloomFilter(size, falsePositiveRate, nTweak);
if (basic.numKeys() > 0)
filter.merge(basic.getFilter(size, falsePositiveRate, nTweak));


@@ -285,7 +285,7 @@ public class MarriedKeyChain extends DeterministicKeyChain {
}
@Override
-public BloomFilter getFilter(int size, double falsePositiveRate, long tweak) {
+public BloomFilter getFilter(int size, double falsePositiveRate, int tweak) {
lock.lock();
BloomFilter filter;
try {


@@ -130,6 +130,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
+import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CompletableFuture;
@@ -4992,7 +4993,7 @@ public class Wallet extends BaseTaggableObject
public BloomFilter getBloomFilter(double falsePositiveRate) {
beginBloomFilterCalculation();
try {
-return getBloomFilter(getBloomFilterElementCount(), falsePositiveRate, (long) (Math.random() * Long.MAX_VALUE));
+return getBloomFilter(getBloomFilterElementCount(), falsePositiveRate, new Random().nextInt());
} finally {
endBloomFilterCalculation();
}
@@ -5006,11 +5007,11 @@ public class Wallet extends BaseTaggableObject
* <p>This is used to generate a BloomFilter which can be {@link BloomFilter#merge(BloomFilter)}d with another.
* It could also be used if you have a specific target for the filter's size.</p>
*
-* <p>See the docs for {@link BloomFilter#BloomFilter(int, double, long, BloomFilter.BloomUpdate)} for a brief explanation of anonymity when using bloom
+* <p>See the docs for {@link BloomFilter#BloomFilter(int, double, int, BloomFilter.BloomUpdate)} for a brief explanation of anonymity when using bloom
* filters.</p>
*/
@Override @GuardedBy("keyChainGroupLock")
-public BloomFilter getBloomFilter(int size, double falsePositiveRate, long nTweak) {
+public BloomFilter getBloomFilter(int size, double falsePositiveRate, int nTweak) {
beginBloomFilterCalculation();
try {
BloomFilter filter = keyChainGroup.getBloomFilter(size, falsePositiveRate, nTweak);


@@ -44,6 +44,7 @@ import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
+import java.util.Random;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.Assert.assertEquals;
@@ -343,7 +344,7 @@ public class KeyChainGroupTest {
public void bloom() {
ECKey key1 = group.freshKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
ECKey key2 = new ECKey();
-BloomFilter filter = group.getBloomFilter(group.getBloomFilterElementCount(), 0.001, (long)(Math.random() * Long.MAX_VALUE));
+BloomFilter filter = group.getBloomFilter(group.getBloomFilterElementCount(), 0.001, new Random().nextInt());
assertTrue(filter.contains(key1.getPubKeyHash()));
assertTrue(filter.contains(key1.getPubKey()));
assertFalse(filter.contains(key2.getPubKey()));
@@ -355,7 +356,7 @@ public class KeyChainGroupTest {
// We ran ahead of the lookahead buffer.
assertFalse(filter.contains(group.freshKey(KeyChain.KeyPurpose.RECEIVE_FUNDS).getPubKey()));
group.importKeys(key2);
-filter = group.getBloomFilter(group.getBloomFilterElementCount(), 0.001, (long) (Math.random() * Long.MAX_VALUE));
+filter = group.getBloomFilter(group.getBloomFilterElementCount(), 0.001, new Random().nextInt());
assertTrue(filter.contains(key1.getPubKeyHash()));
assertTrue(filter.contains(key1.getPubKey()));
assertTrue(filter.contains(key2.getPubKey()));
@@ -386,7 +387,7 @@ public class KeyChainGroupTest {
assertEquals(expected, group.getBloomFilterElementCount());
Address address1 = group.freshAddress(KeyChain.KeyPurpose.RECEIVE_FUNDS);
assertEquals(expected, group.getBloomFilterElementCount());
-BloomFilter filter = group.getBloomFilter(expected + 2, 0.001, (long)(Math.random() * Long.MAX_VALUE));
+BloomFilter filter = group.getBloomFilter(expected + 2, 0.001, new Random().nextInt());
assertTrue(filter.contains(address1.getHash()));
Address address2 = group.freshAddress(KeyChain.KeyPurpose.CHANGE);