mirror of https://github.com/mempool/mempool.git (synced 2025-03-03 17:47:01 +01:00)
Increased disk cache chunks amount to 10 to fix json string length error.
parent: eff4d2c8cd
commit: 1e81355e7d
2 changed files with 40 additions and 16 deletions
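For context: JSON.stringify builds the entire document as one JavaScript string, and V8 caps string length (on the order of a few hundred million characters, depending on the engine version), so serializing a very large mempool in a single call can throw a "string length" RangeError. Writing the mempool in fixed-size chunks keeps every serialized string well under that cap. A minimal sketch of the idea, using a hypothetical chunkObject helper that is not part of the mempool codebase:

```ts
// Hypothetical helper (not from the repository): split a large keyed object into
// fixed-size chunks so that no single JSON.stringify call produces an oversized string.
function chunkObject<T>(obj: { [key: string]: T }, chunkSize: number): { [key: string]: T }[] {
  const entries = Object.entries(obj);
  const chunks: { [key: string]: T }[] = [];
  for (let i = 0; i < entries.length; i += chunkSize) {
    chunks.push(Object.fromEntries(entries.slice(i, i + chunkSize)));
  }
  return chunks;
}

// Each chunk serializes to a comparatively small string:
// chunkObject(memPool.getMempool(), 10000).map((chunk) => JSON.stringify({ mempool: chunk }));
```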
backend/.gitignore (vendored), 10 lines changed
@@ -43,4 +43,12 @@ testem.log
 Thumbs.db
 
 cache.json
-cache2.json
+cache1.json
+cache2.json
+cache3.json
+cache4.json
+cache5.json
+cache6.json
+cache7.json
+cache8.json
+cache9.json
@@ -8,8 +8,8 @@ import logger from '../logger';
 
 class DiskCache {
   private static FILE_NAME = './cache.json';
-  private static FILE_NAME_2 = './cache2.json';
-  private static CHUNK_SIZE = 50000;
+  private static FILE_NAMES = './cache{number}.json';
+  private static CHUNK_SIZE = 10000;
   constructor() {
     if (!cluster.isMaster) {
       return;
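This hunk and the ones below are from the backend's DiskCache class (the disk-cache module). The dedicated FILE_NAME_2 constant becomes a FILE_NAMES template, and CHUNK_SIZE drops from 50000 to 10000 entries per file, so the mempool now spans cache.json plus up to nine numbered files. A quick sketch of how the template expands:

```ts
// Sketch: expanding the FILE_NAMES template the same way the commit does.
const FILE_NAMES = './cache{number}.json';
for (let i = 1; i < 10; i++) {
  console.log(FILE_NAMES.replace('{number}', i.toString()));
}
// Prints ./cache1.json through ./cache9.json; chunk 0 (together with the blocks) stays in ./cache.json.
```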
@@ -30,15 +30,21 @@ class DiskCache {
     }
     try {
       logger.debug('Writing mempool and blocks data to disk cache (async)...');
-      const mempoolChunk_1 = Object.fromEntries(Object.entries(memPool.getMempool()).splice(0, DiskCache.CHUNK_SIZE));
-      const mempoolChunk_2 = Object.fromEntries(Object.entries(memPool.getMempool()).splice(DiskCache.CHUNK_SIZE));
+      const mempoolChunk_1 = Object.fromEntries(Object.entries(memPool.getMempool()).slice(0, DiskCache.CHUNK_SIZE));
       await fsPromises.writeFile(DiskCache.FILE_NAME, JSON.stringify({
         blocks: blocks.getBlocks(),
         mempool: mempoolChunk_1
       }), {flag: 'w'});
-      await fsPromises.writeFile(DiskCache.FILE_NAME_2, JSON.stringify({
-        mempool: mempoolChunk_2
-      }), {flag: 'w'});
+      for (let i = 1; i < 10; i++) {
+        const mempoolChunk = Object.fromEntries(
+          Object.entries(memPool.getMempool()).slice(
+            DiskCache.CHUNK_SIZE * i, i === 9 ? undefined : DiskCache.CHUNK_SIZE * i + DiskCache.CHUNK_SIZE
+          )
+        );
+        await fsPromises.writeFile(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
+          mempool: mempoolChunk
+        }), {flag: 'w'});
+      }
       logger.debug('Mempool and blocks data saved to disk cache');
     } catch (e) {
       logger.warn('Error writing to cache file: ' + e.message || e);
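Two details are worth noting in the write path: the first chunk now uses slice instead of splice, which copies rather than mutates, and each later chunk i covers the half-open range [CHUNK_SIZE * i, CHUNK_SIZE * (i + 1)), with the last iteration (i === 9) passing undefined as the end index so anything beyond the nominal 100000 entries still lands in cache9.json. A standalone sketch of that boundary arithmetic on toy data (not mempool code):

```ts
// Toy data standing in for the mempool entries; the arithmetic mirrors the write loop.
const CHUNK_SIZE = 10000;
const entries = Array.from({ length: 123456 }, (_, n): [string, number] => [`tx${n}`, n]);

for (let i = 1; i < 10; i++) {
  // Chunks 1..8 take a fixed window; chunk 9 takes everything that is left.
  const end = i === 9 ? undefined : CHUNK_SIZE * i + CHUNK_SIZE;
  console.log(`cache${i}.json -> ${entries.slice(CHUNK_SIZE * i, end).length} entries`);
}
// cache1.json..cache8.json report 10000 entries each; cache9.json reports the remaining 33456.
```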
@@ -48,15 +54,22 @@ class DiskCache {
   saveCacheToDiskSync(): void {
     try {
       logger.debug('Writing mempool and blocks data to disk cache...');
-      const mempoolChunk_1 = Object.fromEntries(Object.entries(memPool.getMempool()).splice(0, DiskCache.CHUNK_SIZE));
-      const mempoolChunk_2 = Object.fromEntries(Object.entries(memPool.getMempool()).splice(DiskCache.CHUNK_SIZE));
+      const mempoolChunk_1 = Object.fromEntries(Object.entries(memPool.getMempool()).slice(0, DiskCache.CHUNK_SIZE));
       fs.writeFileSync(DiskCache.FILE_NAME, JSON.stringify({
         blocks: blocks.getBlocks(),
         mempool: mempoolChunk_1
       }), {flag: 'w'});
-      fs.writeFileSync(DiskCache.FILE_NAME_2, JSON.stringify({
-        mempool: mempoolChunk_2
-      }), {flag: 'w'});
+      for (let i = 1; i < 10; i++) {
+        const mempoolChunk = Object.fromEntries(
+          Object.entries(memPool.getMempool()).slice(
+            DiskCache.CHUNK_SIZE * i, i === 9 ? undefined : DiskCache.CHUNK_SIZE * i + DiskCache.CHUNK_SIZE
+          )
+        );
+        fs.writeFileSync(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
+          mempool: mempoolChunk
+        }), {flag: 'w'});
+      }
+
       logger.debug('Mempool and blocks data saved to disk cache');
     } catch (e) {
       logger.warn('Error writing to cache file: ' + e.message || e);
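The synchronous variant mirrors the async one but uses fs.writeFileSync so it can run while the process is going down, when awaiting a promise is not an option. The exact call site is outside this diff; a hedged sketch of how such shutdown wiring typically looks, where the import path, the signal handler, and the diskCache instance are all assumptions for illustration:

```ts
// Illustrative wiring only; the import path and handler are assumptions, not code from the commit.
import diskCache from './api/disk-cache';

process.on('SIGINT', () => {
  diskCache.saveCacheToDiskSync(); // blocking write, acceptable right before the process exits
  process.exit(0);
});
```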
@@ -74,9 +87,12 @@ class DiskCache {
       data = JSON.parse(cacheData);
     }
 
-    if (fs.existsSync(DiskCache.FILE_NAME_2)) {
-      const cacheData2 = JSON.parse(fs.readFileSync(DiskCache.FILE_NAME_2, 'utf8'));
-      Object.assign(data.mempool, cacheData2.mempool);
+    for (let i = 1; i < 10; i++) {
+      const fileName = DiskCache.FILE_NAMES.replace('{number}', i.toString());
+      if (fs.existsSync(fileName)) {
+        const cacheData2 = JSON.parse(fs.readFileSync(fileName, 'utf8'));
+        Object.assign(data.mempool, cacheData2.mempool);
+      }
     }
 
     memPool.setMempool(data.mempool);
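On startup, loading mirrors the write path: cache.json supplies the blocks and the first mempool chunk, and cache1.json through cache9.json are merged back in with Object.assign, skipping any numbered file that does not exist. A toy round trip showing that the merge reconstructs the original map (illustrative, not repository code):

```ts
// Split a toy mempool into per-file JSON strings, then merge the chunks back together.
const CHUNK_SIZE = 10000;
const mempool: { [txid: string]: number } = Object.fromEntries(
  Array.from({ length: 25000 }, (_, n): [string, number] => [`tx${n}`, n])
);
const entries = Object.entries(mempool);

// "Files" 0..2 as JSON strings (in the real cache, file 0 also carries the blocks array).
const files = [0, 1, 2].map((i) =>
  JSON.stringify({ mempool: Object.fromEntries(entries.slice(CHUNK_SIZE * i, CHUNK_SIZE * (i + 1))) })
);

// Merge back exactly the way the loader does, one Object.assign per file.
const restored: { [txid: string]: number } = {};
for (const file of files) {
  Object.assign(restored, JSON.parse(file).mempool);
}
console.log(Object.keys(restored).length === entries.length); // true
```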