Merge pull request #1896 from mempool/nymkappa/feature/index-block-visualization
Index block summaries in db
commit 2d6fcd6d67
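
For orientation, here is a condensed sketch of the indexing flow this PR wires together. It is an illustration only: the runIndexingOnce wrapper and the import path are hypothetical, and all method names are taken from the hunks below.

import blocks from './api/blocks'; // assumed path; the Blocks class shown in the hunks below

// Hypothetical wrapper, simplified from the Indexer changes in this diff.
async function runIndexingOnce(): Promise<void> {
  // $generateBlockDatabase() now returns false when block indexing could not complete
  // or the indexed chain of block hashes turned out to be invalid.
  const chainValid = await blocks.$generateBlockDatabase();
  if (chainValid === false) {
    return; // a reindex was scheduled; stop and retry on the next indexer iteration
  }

  // New step: for each indexed block without a summary, fetch the block via Core RPC,
  // strip its transactions and persist them through BlocksSummariesRepository.$saveSummary().
  await blocks.$generateBlocksSummariesDatabase();
}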

@@ -13,6 +13,7 @@
   "INITIAL_BLOCKS_AMOUNT": 8,
   "MEMPOOL_BLOCKS_AMOUNT": 8,
   "INDEXING_BLOCKS_AMOUNT": 11000,
+  "BLOCKS_SUMMARIES_INDEXING": false,
   "PRICE_FEED_UPDATE_INTERVAL": 600,
   "USE_SECOND_NODE_FOR_MINFEE": false,
   "EXTERNAL_ASSETS": [],

@@ -20,6 +20,7 @@ import indexer from '../indexer';
 import fiatConversion from './fiat-conversion';
 import RatesRepository from '../repositories/RatesRepository';
 import poolsParser from './pools-parser';
+import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
 
 class Blocks {
   private blocks: BlockExtended[] = [];

@@ -242,13 +243,68 @@ class Blocks {
     }
   }
 
+  /**
+   * [INDEXING] Index all blocks summaries for the block txs visualization
+   */
+  public async $generateBlocksSummariesDatabase() {
+    if (Common.blocksSummariesIndexingEnabled() === false) {
+      return;
+    }
+
+    try {
+      // Get all indexed block hash
+      const indexedBlocks = await blocksRepository.$getIndexedBlocks();
+      const indexedBlockSummariesHashesArray = await BlocksSummariesRepository.$getIndexedSummariesId();
+
+      const indexedBlockSummariesHashes = {}; // Use a map for faster seek during the indexing loop
+      for (const hash of indexedBlockSummariesHashesArray) {
+        indexedBlockSummariesHashes[hash] = true;
+      }
+
+      // Logging
+      let newlyIndexed = 0;
+      let totalIndexed = indexedBlockSummariesHashesArray.length;
+      let indexedThisRun = 0;
+      let timer = new Date().getTime() / 1000;
+      const startedAt = new Date().getTime() / 1000;
+
+      for (const block of indexedBlocks) {
+        if (indexedBlockSummariesHashes[block.hash] === true) {
+          continue;
+        }
+
+        // Logging
+        const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
+        if (elapsedSeconds > 5) {
+          const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
+          const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
+          const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
+          const timeLeft = Math.round((indexedBlocks.length - totalIndexed) / blockPerSeconds);
+          logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
+          timer = new Date().getTime() / 1000;
+          indexedThisRun = 0;
+        }
+
+        await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
+
+        // Logging
+        indexedThisRun++;
+        totalIndexed++;
+        newlyIndexed++;
+      }
+      logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
+    } catch (e) {
+      logger.err(`Blocks summaries indexing failed. Reason: ${(e instanceof Error ? e.message : e)}`);
+    }
+  }
+
   /**
    * [INDEXING] Index all blocks metadata for the mining dashboard
    */
-  public async $generateBlockDatabase() {
+  public async $generateBlockDatabase(): Promise<boolean> {
     const blockchainInfo = await bitcoinClient.getBlockchainInfo();
     if (blockchainInfo.blocks !== blockchainInfo.headers) { // Wait for node to sync
-      return;
+      return false;
     }
 
     try {

@@ -292,7 +348,7 @@ class Blocks {
         const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
         if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
           const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
-          const blockPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
+          const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
           const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
           const timeLeft = Math.round((indexingBlockAmount - totalIndexed) / blockPerSeconds);
           logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);

@@ -316,13 +372,16 @@ class Blocks {
     } catch (e) {
       logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e));
       loadingIndicators.setProgress('block-indexing', 100);
-      return;
+      return false;
     }
 
     const chainValid = await BlocksRepository.$validateChain();
     if (!chainValid) {
       indexer.reindex();
+      return false;
     }
+
+    return true;
   }
 
   public async $updateBlocks() {

@@ -387,11 +446,19 @@ class Blocks {
             // We assume there won't be a reorg with more than 10 block depth
             await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
             await HashratesRepository.$deleteLastEntries();
+            await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
             for (let i = 10; i >= 0; --i) {
-              await this.$indexBlock(lastBlock['height'] - i);
+              const newBlock = await this.$indexBlock(lastBlock['height'] - i);
+              await this.$getStrippedBlockTransactions(newBlock.id, true, true);
             }
+            logger.info(`Re-indexed 10 blocks and summaries`);
           }
           await blocksRepository.$saveBlockInDatabase(blockExtended);
+
+          // Save blocks summary for visualization if it's enabled
+          if (Common.blocksSummariesIndexingEnabled() === true) {
+            await this.$getStrippedBlockTransactions(blockExtended.id, true);
+          }
         }
       }
       if (fiatConversion.ratesInitialized === true && config.DATABASE.ENABLED === true) {

@@ -477,14 +544,34 @@ class Blocks {
     return blockExtended;
   }
 
-  public async $getStrippedBlockTransactions(hash: string): Promise<TransactionStripped[]> {
-    // Check the memory cache
-    const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
-    if (cachedSummary) {
-      return cachedSummary.transactions;
+  public async $getStrippedBlockTransactions(hash: string, skipMemoryCache: boolean = false,
+    skipDBLookup: boolean = false): Promise<TransactionStripped[]>
+  {
+    if (skipMemoryCache === false) {
+      // Check the memory cache
+      const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
+      if (cachedSummary) {
+        return cachedSummary.transactions;
+      }
     }
 
+    // Check if it's indexed in db
+    if (skipDBLookup === false && Common.blocksSummariesIndexingEnabled() === true) {
+      const indexedSummary = await BlocksSummariesRepository.$getByBlockId(hash);
+      if (indexedSummary !== undefined) {
+        return indexedSummary.transactions;
+      }
+    }
+
+    // Call Core RPC
     const block = await bitcoinClient.getBlock(hash, 2);
     const summary = this.summarizeBlock(block);
 
+    // Index the response if needed
+    if (Common.blocksSummariesIndexingEnabled() === true) {
+      await BlocksSummariesRepository.$saveSummary(block.height, summary);
+    }
+
     return summary.transactions;
   }
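
The rewritten $getStrippedBlockTransactions() above resolves a summary in three tiers (in-memory cache, then the blocks_summaries table, then Core RPC), and the two new flags choose which tiers are skipped. A hedged illustration of the three call patterns that appear in this diff; the import path is an assumption, and blocks is the default export of the class above.

import blocks from './blocks'; // assumed path, relative to a sibling module of the Blocks class

// Illustrative only: the flag combinations used across this PR.
async function summaryCallPatterns(hash: string): Promise<void> {
  await blocks.$getStrippedBlockTransactions(hash);             // API reads: memory cache -> DB -> Core RPC
  await blocks.$getStrippedBlockTransactions(hash, true);       // freshly saved block: skip the memory cache, still check/write the DB
  await blocks.$getStrippedBlockTransactions(hash, true, true); // bulk/reorg indexing: skip both caches, fetch from Core RPC and index
}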

@@ -177,4 +177,11 @@ export class Common {
       config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== 0
     );
   }
+
+  static blocksSummariesIndexingEnabled(): boolean {
+    return (
+      Common.indexingEnabled() &&
+      config.MEMPOOL.BLOCKS_SUMMARIES_INDEXING === true
+    );
+  }
 }

@@ -4,7 +4,7 @@ import logger from '../logger';
 import { Common } from './common';
 
 class DatabaseMigration {
-  private static currentVersion = 19;
+  private static currentVersion = 20;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];

@@ -217,6 +217,10 @@ class DatabaseMigration {
       if (databaseSchemaVersion < 19) {
         await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
       }
+
+      if (databaseSchemaVersion < 20 && isBitcoin === true) {
+        await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
+      }
     } catch (e) {
       throw e;
     }

@@ -512,6 +516,16 @@ class DatabaseMigration {
     ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
   }
 
+  private getCreateBlocksSummariesTableQuery(): string {
+    return `CREATE TABLE IF NOT EXISTS blocks_summaries (
+      height int(10) unsigned NOT NULL,
+      id varchar(65) NOT NULL,
+      transactions JSON NOT NULL,
+      PRIMARY KEY (id),
+      INDEX (height)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
+  }
+
   public async $truncateIndexedData(tables: string[]) {
     const allowedTables = ['blocks', 'hashrates'];
 

@@ -15,6 +15,7 @@ interface IConfig {
     INITIAL_BLOCKS_AMOUNT: number;
     MEMPOOL_BLOCKS_AMOUNT: number;
     INDEXING_BLOCKS_AMOUNT: number;
+    BLOCKS_SUMMARIES_INDEXING: boolean;
     PRICE_FEED_UPDATE_INTERVAL: number;
     USE_SECOND_NODE_FOR_MINFEE: boolean;
     EXTERNAL_ASSETS: string[];

@@ -104,6 +105,7 @@ const defaults: IConfig = {
     'INITIAL_BLOCKS_AMOUNT': 8,
     'MEMPOOL_BLOCKS_AMOUNT': 8,
     'INDEXING_BLOCKS_AMOUNT': 11000, // 0 = disable indexing, -1 = index all blocks
+    'BLOCKS_SUMMARIES_INDEXING': false,
     'PRICE_FEED_UPDATE_INTERVAL': 600,
     'USE_SECOND_NODE_FOR_MINFEE': false,
     'EXTERNAL_ASSETS': [],

@@ -29,10 +29,17 @@ class Indexer {
     this.indexerRunning = true;
 
     try {
-      await blocks.$generateBlockDatabase();
+      const chainValid = await blocks.$generateBlockDatabase();
+      if (chainValid === false) {
+        // Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration
+        this.indexerRunning = false;
+        return;
+      }
+
       await this.$resetHashratesIndexingState();
       await mining.$generateNetworkHashrateHistory();
       await mining.$generatePoolHashrateHistory();
+      await blocks.$generateBlocksSummariesDatabase();
     } catch (e) {
       this.reindex();
       logger.err(`Indexer failed, trying again later. Reason: ` + (e instanceof Error ? e.message : e));

@@ -6,6 +6,7 @@ import { prepareBlock } from '../utils/blocks-utils';
 import PoolsRepository from './PoolsRepository';
 import HashratesRepository from './HashratesRepository';
 import { escape } from 'mysql2';
+import BlocksSummariesRepository from './BlocksSummariesRepository';
 
 class BlocksRepository {
   /**

@@ -495,6 +496,7 @@ class BlocksRepository {
       if (blocks[idx].previous_block_hash !== blocks[idx - 1].hash) {
         logger.warn(`Chain divergence detected at block ${blocks[idx - 1].height}, re-indexing newer blocks and hashrates`);
         await this.$deleteBlocksFrom(blocks[idx - 1].height);
+        await BlocksSummariesRepository.$deleteBlocksFrom(blocks[idx - 1].height);
         await HashratesRepository.$deleteHashratesFromTimestamp(blocks[idx - 1].timestamp - 604800);
         return false;
       }

@@ -652,6 +654,19 @@ class BlocksRepository {
       throw e;
     }
   }
+
+  /**
+   * Get a list of blocks that have been indexed
+   */
+  public async $getIndexedBlocks(): Promise<any[]> {
+    try {
+      const [rows]: any = await DB.query(`SELECT height, hash FROM blocks ORDER BY height DESC`);
+      return rows;
+    } catch (e) {
+      logger.err('Cannot generate block size and weight history. Reason: ' + (e instanceof Error ? e.message : e));
+      throw e;
+    }
+  }
 }
 
 export default new BlocksRepository();

backend/src/repositories/BlocksSummariesRepository.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
+import DB from '../database';
+import logger from '../logger';
+import { BlockSummary } from '../mempool.interfaces';
+
+class BlocksSummariesRepository {
+  public async $getByBlockId(id: string): Promise<BlockSummary | undefined> {
+    try {
+      const [summary]: any[] = await DB.query(`SELECT * from blocks_summaries WHERE id = ?`, [id]);
+      if (summary.length > 0) {
+        summary[0].transactions = JSON.parse(summary[0].transactions);
+        return summary[0];
+      }
+    } catch (e) {
+      logger.err(`Cannot get block summary for block id ${id}. Reason: ` + (e instanceof Error ? e.message : e));
+    }
+
+    return undefined;
+  }
+
+  public async $saveSummary(height: number, summary: BlockSummary) {
+    try {
+      await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?)`, [height, summary.id, JSON.stringify(summary.transactions)]);
+    } catch (e: any) {
+      if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
+        logger.debug(`Cannot save block summary for ${summary.id} because it has already been indexed, ignoring`);
+      } else {
+        logger.debug(`Cannot save block summary for ${summary.id}. Reason: ${e instanceof Error ? e.message : e}`);
+        throw e;
+      }
+    }
+  }
+
+  public async $getIndexedSummariesId(): Promise<string[]> {
+    try {
+      const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
+      return rows.map(row => row.id);
+    } catch (e) {
+      logger.err(`Cannot get block summaries id list. Reason: ` + (e instanceof Error ? e.message : e));
+    }
+
+    return [];
+  }
+
+  /**
+   * Delete blocks from the database from blockHeight
+   */
+  public async $deleteBlocksFrom(blockHeight: number) {
+    logger.info(`Delete newer blocks summary from height ${blockHeight} from the database`);
+
+    try {
+      await DB.query(`DELETE FROM blocks_summaries where height >= ${blockHeight}`);
+    } catch (e) {
+      logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
+    }
+  }
+}
+
+export default new BlocksSummariesRepository();
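
A short usage sketch for the new repository (a hypothetical helper, not part of the PR): force a single block summary to be re-indexed, then read it back from the database. Import paths assume a module living directly under backend/src/.

import blocks from './api/blocks';
import BlocksSummariesRepository from './repositories/BlocksSummariesRepository';

// Hypothetical helper: re-index one block summary and count the stored stripped transactions.
async function reindexOneSummary(hash: string): Promise<number> {
  // skipMemoryCache = true, skipDBLookup = true: always fetch from Core RPC and persist the summary
  await blocks.$getStrippedBlockTransactions(hash, true, true);

  const summary = await BlocksSummariesRepository.$getByBlockId(hash);
  return summary?.transactions.length ?? 0;
}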

@@ -729,7 +729,7 @@ class Routes {
   public async getStrippedBlockTransactions(req: Request, res: Response) {
     try {
       const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
-      res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
+      res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
       res.json(transactions);
     } catch (e) {
       res.status(500).send(e instanceof Error ? e.message : e);
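
The only functional change above is the client-side cache lifetime on this endpoint, presumably because an indexed summary of a confirmed block is effectively immutable (reorged blocks are deleted and re-indexed separately). For reference, the arithmetic behind the two millisecond values:

const OLD_EXPIRES_MS = 1000 * 600;            // 600 000 ms = 10 minutes
const NEW_EXPIRES_MS = 1000 * 3600 * 24 * 30; // 2 592 000 000 ms = 30 days
// Both are applied the same way: res.setHeader('Expires', new Date(Date.now() + ttlMs).toUTCString());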

@@ -19,7 +19,8 @@
     "EXTERNAL_RETRY_INTERVAL": __MEMPOOL_EXTERNAL_RETRY_INTERVAL__,
     "USER_AGENT": "__MEMPOOL_USER_AGENT__",
     "STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
-    "INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__
+    "INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__,
+    "BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__
   },
   "CORE_RPC": {
     "HOST": "__CORE_RPC_HOST__",

@@ -14,6 +14,7 @@ __MEMPOOL_BLOCK_WEIGHT_UNITS__=${MEMPOOL_BLOCK_WEIGHT_UNITS:=4000000}
 __MEMPOOL_INITIAL_BLOCKS_AMOUNT__=${MEMPOOL_INITIAL_BLOCKS_AMOUNT:=8}
 __MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_MEMPOOL_BLOCKS_AMOUNT:=8}
 __MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=11000}
+__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__=${MEMPOOL_BLOCKS_SUMMARIES_INDEXING:=false}
 __MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__=${MEMPOOL_PRICE_FEED_UPDATE_INTERVAL:=600}
 __MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__=${MEMPOOL_USE_SECOND_NODE_FOR_MINFEE:=false}
 __MEMPOOL_EXTERNAL_ASSETS__=${MEMPOOL_EXTERNAL_ASSETS:=[]}

@@ -101,6 +102,7 @@ sed -i "s/__MEMPOOL_BLOCK_WEIGHT_UNITS__/${__MEMPOOL_BLOCK_WEIGHT_UNITS__}/g" mempool-config.json
 sed -i "s/__MEMPOOL_INITIAL_BLOCKS_AMOUNT__/${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}/g" mempool-config.json
 sed -i "s/__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__/${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}/g" mempool-config.json
 sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
+sed -i "s/__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__/${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}/g" mempool-config.json
 sed -i "s/__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__/${__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__}/g" mempool-config.json
 sed -i "s/__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__/${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}/g" mempool-config.json
 sed -i "s!__MEMPOOL_EXTERNAL_ASSETS__!${__MEMPOOL_EXTERNAL_ASSETS__}!g" mempool-config.json

@@ -1,7 +1,7 @@
 import { Injectable } from '@angular/core';
 import { HttpClient, HttpParams } from '@angular/common/http';
 import { CpfpInfo, OptimizedMempoolStats, AddressInformation, LiquidPegs, ITranslators,
-  PoolsStats, PoolStat, BlockExtended, TransactionStripped, RewardStats } from '../interfaces/node-api.interface';
+  PoolStat, BlockExtended, TransactionStripped, RewardStats } from '../interfaces/node-api.interface';
 import { Observable } from 'rxjs';
 import { StateService } from './state.service';
 import { WebsocketResponse } from '../interfaces/websocket.interface';

@@ -9,6 +9,7 @@
     "CLEAR_PROTECTION_MINUTES": 5,
     "POLL_RATE_MS": 1000,
     "INDEXING_BLOCKS_AMOUNT": -1,
+    "BLOCKS_SUMMARIES_INDEXING": true,
     "USE_SECOND_NODE_FOR_MINFEE": true
   },
   "SYSLOG" : {

@@ -4,14 +4,42 @@ location /api/v1/statistics {
 location /api/v1/mining {
     try_files /dev/null @mempool-api-v1-warmcache;
 }
+location /api/v1/block {
+    try_files /dev/null @mempool-api-v1-forevercache;
+}
 location /api/v1 {
     try_files /dev/null @mempool-api-v1-coldcache;
 }
+location /api/block {
+    rewrite ^/api/(.*) /$1 break;
+    try_files /dev/null @electrs-api-forevercache;
+}
 location /api/ {
     rewrite ^/api/(.*) /$1 break;
     try_files /dev/null @electrs-api-nocache;
 }
 
+location @mempool-api-v1-forevercache {
+    proxy_pass $mempoolBackend;
+    proxy_http_version 1.1;
+
+    proxy_set_header Host $http_host;
+    proxy_set_header X-Real-IP $remote_addr;
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_set_header Upgrade $http_upgrade;
+    proxy_set_header Connection "upgrade";
+    proxy_set_header X-Forwarded-Proto $scheme;
+
+    proxy_cache_bypass $http_upgrade;
+    proxy_cache_background_update on;
+    proxy_cache_use_stale updating;
+    proxy_cache api;
+    proxy_cache_valid 200 30d;
+    proxy_redirect off;
+
+    expires 30d;
+}
+
 location @mempool-api-v1-warmcache {
     proxy_pass $mempoolBackend;
     proxy_http_version 1.1;

@@ -46,6 +74,7 @@ location @mempool-api-v1-coldcache {
     proxy_cache api;
     proxy_cache_valid 200 10s;
     proxy_redirect off;
 
     expires 10s;
 }
+

@@ -81,4 +110,27 @@ location @electrs-api-nocache {
     proxy_cache_bypass $http_upgrade;
     proxy_redirect off;
     proxy_buffering off;
 
     expires -1;
 }
+
+location @electrs-api-forevercache {
+    proxy_pass $electrsBackend;
+    proxy_http_version 1.1;
+
+    proxy_set_header Host $http_host;
+    proxy_set_header X-Real-IP $remote_addr;
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_set_header Upgrade $http_upgrade;
+    proxy_set_header Connection "upgrade";
+    proxy_set_header X-Forwarded-Proto $scheme;
+
+    proxy_cache_bypass $http_upgrade;
+    proxy_cache_background_update on;
+    proxy_cache_use_stale updating;
+    proxy_cache api;
+    proxy_cache_valid 200 30d;
+    proxy_redirect off;
+
+    expires 30d;
+}