feat: chaintip-based cache-control, caching with zero stale data (#834)

HTTP chain-tip-based cache-control implemented for the following routes:
* /extended/v1/block
* /extended/v1/block/:hash
* /extended/v1/block/by_height/:height
* /extended/v1/block/by_burn_block_height/:burnBlockHeight
* /extended/v1/block/by_burn_block_hash/:burnBlockHash
* /extended/v1/tx
This commit is contained in:
Matthew Little
2021-12-07 20:13:40 +01:00
committed by GitHub
parent f993e0d2ef
commit 581bef4b2a
10 changed files with 804 additions and 113 deletions

38
src/api/async-handler.ts Normal file
View File

@@ -0,0 +1,38 @@
/**
* Inspired by https://github.com/Abazhenov/express-async-handler
* Modified to improve type definitions so that they fully match the
* types specified in the original (synchronous) Express handler.
*
* This is used as an alternative to the async Express extensions
* provided by the `@awaitjs/express` lib, e.g. `router.getAsync`,
* because it has incorrect/bugged behavior when given multiple router
* handler functions. It executes both synchronously at the same time,
* breaking the ability for the handlers to control the route flow
* based on the order in which they are specified.
*/
import * as express from 'express';
import * as core from 'express-serve-static-core';
export function asyncHandler<
  P = core.ParamsDictionary,
  ResBody = any,
  ReqBody = any,
  ReqQuery = core.Query,
  Locals extends Record<string, any> = Record<string, any>
>(
  handler: (
    ...args: Parameters<express.RequestHandler<P, ResBody, ReqBody, ReqQuery, Locals>>
  ) => void | Promise<void>
): express.RequestHandler<P, ResBody, ReqBody, ReqQuery, Locals> {
  // Wrap the (possibly async) handler so that any rejection or synchronous
  // throw is forwarded to Express's `next()` error callback.
  return function asyncUtilWrap(
    ...args: Parameters<express.RequestHandler<P, ResBody, ReqBody, ReqQuery, Locals>>
  ) {
    // Express always passes the `next` callback as the final argument.
    const nextFn = args[args.length - 1] as core.NextFunction;
    try {
      // `Promise.resolve` normalizes both sync (void) and async return values,
      // so a rejected promise is routed into `nextFn`.
      return Promise.resolve(handler(...args)).catch(nextFn);
    } catch (error) {
      // The handler threw synchronously, before any promise was produced.
      nextFn(error);
    }
  };
}

View File

@@ -0,0 +1,193 @@
import { RequestHandler, Request, Response } from 'express';
import * as prom from 'prom-client';
import { FoundOrNot, logger } from '../../helpers';
import { DataStore, DbChainTip } from '../../datastore/common';
import { asyncHandler } from '../async-handler';
// Sentinel returned by `checkChainTipCacheOK` when the client's cached response
// matches the current chain tip (i.e. a `304 Not Modified` can be served).
const CACHE_OK = Symbol('cache_ok');
// Key under which the current chain tip is stashed on `res.locals`.
const CHAIN_TIP_LOCAL = 'chain_tip';
// Prometheus counters tracking totals for cache hits, misses, and requests
// that sent no `If-None-Match` header at all.
interface ChainTipCacheMetrics {
chainTipCacheHits: prom.Counter<string>;
chainTipCacheMisses: prom.Counter<string>;
chainTipCacheNoHeader: prom.Counter<string>;
}
// Module-level singleton, lazily created by `getChainTipMetrics`.
let _chainTipMetrics: ChainTipCacheMetrics | undefined;
/**
 * Returns the module-level set of prometheus counters used for chain tip
 * cache tracking, creating them on first use so the counters are registered
 * only once.
 */
function getChainTipMetrics(): ChainTipCacheMetrics {
  if (_chainTipMetrics === undefined) {
    _chainTipMetrics = {
      chainTipCacheHits: new prom.Counter({
        name: 'chain_tip_cache_hits',
        help: 'Total count of requests with an up-to-date chain tip cache header',
      }),
      chainTipCacheMisses: new prom.Counter({
        name: 'chain_tip_cache_misses',
        help: 'Total count of requests with a stale chain tip cache header',
      }),
      chainTipCacheNoHeader: new prom.Counter({
        name: 'chain_tip_cache_no_header',
        help: 'Total count of requests that did not provide a chain tip header',
      }),
    };
  }
  return _chainTipMetrics;
}
/**
 * Clears any previously-set `Cache-Control` and `ETag` headers from the
 * response, so no cache directives are emitted for it.
 */
export function setResponseNonCacheable(res: Response) {
  for (const headerName of ['Cache-Control', 'ETag']) {
    res.removeHeader(headerName);
  }
}
/**
 * Sets the `Cache-Control` and `ETag` response headers from the chain tip
 * previously stored on `res.locals` (by the chain tip cache handler).
 * The ETag uses the latest unanchored microblock hash when one exists,
 * falling back to the anchor block index hash.
 */
export function setChainTipCacheHeaders(res: Response) {
  const chainTip: FoundOrNot<DbChainTip> | undefined = res.locals[CHAIN_TIP_LOCAL];
  if (!chainTip) {
    logger.error(
      `Cannot set cache control headers, no chain tip was set on \`Response.locals[CHAIN_TIP_LOCAL]\`.`
    );
    return;
  }
  if (!chainTip.found) {
    // No chain tip available (e.g. db has no blocks yet) -- leave the response uncached.
    return;
  }
  const etagValue = chainTip.result.microblockHash ?? chainTip.result.indexBlockHash;
  res.set({
    // `public, no-cache` is equivalent to `public, max-age=0, must-revalidate`:
    // `public` lets proxies/CDNs store the response (not just browsers), while
    // `no-cache` forces every client to revalidate (via the ETag) before reuse.
    'Cache-Control': 'public, no-cache',
    // The chain tip hash, double-quoted per the ETag spec (a quoted ASCII string
    // uniquely representing the resource). Clients echo this back in the
    // `If-None-Match` request header, so the cache invalidates on new blocks.
    // See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag
    ETag: `"${etagValue}"`,
  });
}
/**
 * Extracts the etag values from a raw `If-None-Match` request header value.
 * Wrapping double quotes (if any) and weak-validation `W/` prefixes (if any)
 * are stripped. Parsing is deliberately permissive, since clients, proxies,
 * and CDNs are frequently non-spec-compliant.
 * E.g. the value:
 * ```js
 * `"a", W/"b", c,d, "e", "f"`
 * ```
 * Would be parsed and returned as:
 * ```js
 * ['a', 'b', 'c', 'd', 'e', 'f']
 * ```
 * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/If-None-Match#syntax
 * ```
 * If-None-Match: "etag_value"
 * If-None-Match: "etag_value", "etag_value", ...
 * If-None-Match: *
 * ```
 * @param ifNoneMatchHeaderValue - raw header value
 * @returns an array of etag values, or `undefined` when the header is absent/empty
 */
export function parseIfNoneMatchHeader(
  ifNoneMatchHeaderValue: string | undefined
): string[] | undefined {
  if (!ifNoneMatchHeaderValue) {
    return undefined;
  }
  // Strip an optional weak-validator prefix (`W/"`) or bare opening quote, plus
  // an optional closing quote. The API emits compliant strong-validation ETags
  // (double-quoted ASCII), but inbound values may be arbitrarily mangled.
  const matched = /^(?:"|W\/")?(.*?)"?$/gi.exec(ifNoneMatchHeaderValue.trim());
  const normalized = matched?.[1];
  if (!normalized) {
    // Only reachable for degenerate input such as `If-None-Match: ""`, or if
    // the regex above has a flaw. Log a warning for now.
    logger.warn(`Normalized If-None-Match header is falsy: ${ifNoneMatchHeaderValue}`);
    return undefined;
  }
  if (!normalized.includes(',')) {
    // Typical case: a single etag value was provided.
    return [normalized];
  }
  // Multiple comma-separated values (likely extras appended by a proxy/CDN):
  // split them apart, discarding quotes, `W/` prefixes, and extra whitespace.
  return normalized.split(/(?:W\/"|")?(?:\s*),(?:\s*)(?:W\/"|")?/gi);
}
/**
 * Compares the etag(s) in the request's `If-None-Match` header (which represent
 * the chain tip associated with the client's cached response) against the
 * current chain tip queried from the db, recording the prometheus cache
 * hit/miss metrics along the way.
 * @returns `CACHE_OK` when the client's cached response is still current with
 * the chain tip; otherwise returns the freshly-queried chain tip, which is
 * later used to set the cache-control etag response header.
 */
async function checkChainTipCacheOK(
  db: DataStore,
  req: Request
): Promise<FoundOrNot<DbChainTip> | typeof CACHE_OK> {
  const metrics = getChainTipMetrics();
  const chainTip = await db.getUnanchoredChainTip();
  if (!chainTip.found) {
    // Only possible while the API is serving requests before it has synced any blocks.
    return chainTip;
  }
  // Header names are normalized to lowercase by node.js on `IncomingMessage.headers`.
  const requestEtags = parseIfNoneMatchHeader(req.headers['if-none-match']);
  if (requestEtags === undefined || requestEtags.length === 0) {
    // Client did not send a conditional request.
    metrics.chainTipCacheNoHeader.inc();
    return chainTip;
  }
  const currentEtag = chainTip.result.microblockHash ?? chainTip.result.indexBlockHash;
  if (requestEtags.includes(currentEtag)) {
    // Cache hit: the client already holds a response generated at this exact chain
    // tip, so the request does not need to be re-processed server-side.
    metrics.chainTipCacheHits.inc();
    return CACHE_OK;
  }
  // Cache miss: the client's etag refers to a different block than the current
  // chain tip (typically an older or forked block), so its cached response is
  // stale and should not be used.
  metrics.chainTipCacheMisses.inc();
  return chainTip;
}
/**
 * Builds an Express middleware that answers `304 Not Modified` when the
 * request's `If-None-Match` header matches the current chain tip, completing
 * the request without running the rest of the route. On a cache miss the
 * current chain tip is stored on `res.locals` for later use when setting the
 * response cache headers.
 * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching#freshness
 * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/If-None-Match
 */
export function getChainTipCacheHandler(db: DataStore): RequestHandler {
  return asyncHandler(async (req, res, next) => {
    const cacheCheck = await checkChainTipCacheOK(db, req);
    if (cacheCheck === CACHE_OK) {
      // Client cache is current -- finish the request here with a 304 and
      // intentionally skip `next()` so the non-cached route handler never runs.
      res.status(304).send();
    } else {
      // Stale or absent cache: stash the chain tip for `setChainTipCacheHeaders`.
      res.locals[CHAIN_TIP_LOCAL] = cacheCheck;
      next();
    }
  });
}

View File

@@ -43,6 +43,7 @@ import { createMicroblockRouter } from './routes/microblock';
import { createStatusRouter } from './routes/status';
import { createTokenRouter } from './routes/tokens/tokens';
import { createFeeRateRouter } from './routes/fee-rate';
import { setResponseNonCacheable } from './controllers/cache-controller';
export interface ApiServer {
expressApp: ExpressWithAsync;
@@ -140,6 +141,11 @@ export async function startApiServer(opts: {
app.set('json spaces', 2);
// Turn off Express's etag handling. By default CRC32 hashes are generated over response payloads
// which are useless for our use case and wastes CPU.
// See https://expressjs.com/en/api.html#etag.options.table
app.set('etag', false);
app.get('/', (req, res) => {
res.redirect(`/extended/v1/status`);
});
@@ -155,6 +161,7 @@ export async function startApiServer(opts: {
router.use('/microblock', createMicroblockRouter(datastore));
router.use('/burnchain', createBurnchainRouter(datastore));
router.use('/contract', createContractRouter(datastore));
// same here, exclude account nonce route
router.use('/address', createAddressRouter(datastore, chainId));
router.use('/search', createSearchRouter(datastore));
router.use('/info', createInfoRouter(datastore));
@@ -217,6 +224,23 @@ export async function startApiServer(opts: {
// Setup error handler (must be added at the end of the middleware stack)
app.use(((error, req, res, next) => {
if (req.method === 'GET' && res.statusCode !== 200 && res.hasHeader('ETag')) {
logger.error(
`Non-200 request has ETag: ${res.header('ETag')}, Cache-Control: ${res.header(
'Cache-Control'
)}`
);
}
if (error && res.headersSent && res.statusCode !== 200 && res.hasHeader('ETag')) {
logger.error(
`A non-200 response with an error in request processing has ETag: ${res.header(
'ETag'
)}, Cache-Control: ${res.header('Cache-Control')}`
);
}
if (!res.headersSent && (error || res.statusCode !== 200)) {
setResponseNonCacheable(res);
}
if (error && !res.headersSent) {
res.status(500);
const errorTag = uuid();

View File

@@ -8,6 +8,8 @@ import { getBlockFromDataStore } from '../controllers/db-controller';
import { timeout, waiter, has0xPrefix } from '../../helpers';
import { parseLimitQuery, parsePagingQueryInput } from '../pagination';
import { getBlockHeightPathParam } from '../query-helpers';
import { getChainTipCacheHandler, setChainTipCacheHeaders } from '../controllers/cache-controller';
import { asyncHandler } from '../async-handler';
const MAX_BLOCKS_PER_REQUEST = 30;
@@ -18,92 +20,119 @@ const parseBlockQueryLimit = parseLimitQuery({
export function createBlockRouter(db: DataStore): RouterWithAsync {
const router = addAsync(express.Router());
const cacheHandler = getChainTipCacheHandler(db);
router.get(
'/',
cacheHandler,
asyncHandler(async (req, res) => {
const limit = parseBlockQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
router.getAsync('/', async (req, res) => {
const limit = parseBlockQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
const { results: blocks, total } = await db.getBlocks({ offset, limit });
// TODO: fix duplicate pg queries
const results = await Bluebird.mapSeries(blocks, async block => {
const blockQuery = await getBlockFromDataStore({
blockIdentifer: { hash: block.block_hash },
db,
// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
const { results: blocks, total } = await db.getBlocks({ offset, limit });
// TODO: fix duplicate pg queries
const results = await Bluebird.mapSeries(blocks, async block => {
const blockQuery = await getBlockFromDataStore({
blockIdentifer: { hash: block.block_hash },
db,
});
if (!blockQuery.found) {
throw new Error('unexpected block not found -- fix block enumeration query');
}
return blockQuery.result;
});
if (!blockQuery.found) {
throw new Error('unexpected block not found -- fix block enumeration query');
setChainTipCacheHeaders(res);
// TODO: block schema validation
const response: BlockListResponse = { limit, offset, total, results };
res.json(response);
})
);
router.get(
'/by_height/:height',
cacheHandler,
asyncHandler(async (req, res, next) => {
const height = getBlockHeightPathParam(req, res, next);
const block = await getBlockFromDataStore({ blockIdentifer: { height }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by height ${height}` });
return;
}
return blockQuery.result;
});
const response: BlockListResponse = { limit, offset, total, results };
// TODO: block schema validation
res.json(response);
});
setChainTipCacheHeaders(res);
// TODO: block schema validation
res.json(block.result);
})
);
router.getAsync('/by_height/:height', async (req, res, next) => {
const height = getBlockHeightPathParam(req, res, next);
const block = await getBlockFromDataStore({ blockIdentifer: { height }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by height ${height}` });
return;
}
// TODO: block schema validation
res.json(block.result);
});
router.get(
'/by_burn_block_height/:burnBlockHeight',
cacheHandler,
asyncHandler(async (req, res) => {
const burnBlockHeight = parseInt(req.params['burnBlockHeight'], 10);
if (!Number.isInteger(burnBlockHeight)) {
res.status(400).json({
error: `burnchain height is not a valid integer: ${req.params['burnBlockHeight']}`,
});
return;
}
if (burnBlockHeight < 1) {
res
.status(400)
.json({ error: `burnchain height is not a positive integer: ${burnBlockHeight}` });
return;
}
const block = await getBlockFromDataStore({ blockIdentifer: { burnBlockHeight }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by height ${burnBlockHeight}` });
return;
}
setChainTipCacheHeaders(res);
// TODO: block schema validation
res.json(block.result);
})
);
router.getAsync('/by_burn_block_height/:burnBlockHeight', async (req, res) => {
const burnBlockHeight = parseInt(req.params['burnBlockHeight'], 10);
if (!Number.isInteger(burnBlockHeight)) {
return res.status(400).json({
error: `burnchain height is not a valid integer: ${req.params['burnBlockHeight']}`,
});
}
if (burnBlockHeight < 1) {
return res
.status(400)
.json({ error: `burnchain height is not a positive integer: ${burnBlockHeight}` });
}
const block = await getBlockFromDataStore({ blockIdentifer: { burnBlockHeight }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by height ${burnBlockHeight}` });
return;
}
// TODO: block schema validation
res.json(block.result);
});
router.get(
'/:hash',
cacheHandler,
asyncHandler(async (req, res) => {
const { hash } = req.params;
router.getAsync('/:hash', async (req, res) => {
const { hash } = req.params;
if (!has0xPrefix(hash)) {
return res.redirect('/extended/v1/block/0x' + hash);
}
if (!has0xPrefix(hash)) {
return res.redirect('/extended/v1/block/0x' + hash);
}
const block = await getBlockFromDataStore({ blockIdentifer: { hash }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by hash ${hash}` });
return;
}
setChainTipCacheHeaders(res);
// TODO: block schema validation
res.json(block.result);
})
);
const block = await getBlockFromDataStore({ blockIdentifer: { hash }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by hash ${hash}` });
return;
}
// TODO: block schema validation
res.json(block.result);
});
router.get(
'/by_burn_block_hash/:burnBlockHash',
cacheHandler,
asyncHandler(async (req, res) => {
const { burnBlockHash } = req.params;
router.getAsync('/by_burn_block_hash/:burnBlockHash', async (req, res) => {
const { burnBlockHash } = req.params;
if (!has0xPrefix(burnBlockHash)) {
return res.redirect('/extended/v1/block/by_burn_block_hash/0x' + burnBlockHash);
}
if (!has0xPrefix(burnBlockHash)) {
return res.redirect('/extended/v1/block/by_burn_block_hash/0x' + burnBlockHash);
}
const block = await getBlockFromDataStore({ blockIdentifer: { burnBlockHash }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by burn block hash ${burnBlockHash}` });
return;
}
// TODO: block schema validation
res.json(block.result);
});
const block = await getBlockFromDataStore({ blockIdentifer: { burnBlockHash }, db });
if (!block.found) {
res.status(404).json({ error: `cannot find block by burn block hash ${burnBlockHash}` });
return;
}
setChainTipCacheHeaders(res);
// TODO: block schema validation
res.json(block.result);
})
);
return router;
}

View File

@@ -28,6 +28,8 @@ import {
GetRawTransactionResult,
Transaction,
} from '@stacks/stacks-blockchain-api-types';
import { getChainTipCacheHandler, setChainTipCacheHeaders } from '../controllers/cache-controller';
import { asyncHandler } from '../async-handler';
const MAX_TXS_PER_REQUEST = 200;
const parseTxQueryLimit = parseLimitQuery({
@@ -50,46 +52,57 @@ const parseTxQueryEventsLimit = parseLimitQuery({
export function createTxRouter(db: DataStore): RouterWithAsync {
const router = addAsync(express.Router());
router.getAsync('/', async (req, res, next) => {
const limit = parseTxQueryLimit(req.query.limit ?? 96);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const cacheHandler = getChainTipCacheHandler(db);
const typeQuery = req.query.type;
let txTypeFilter: TransactionType[];
if (Array.isArray(typeQuery)) {
txTypeFilter = parseTxTypeStrings(typeQuery as string[]);
} else if (typeof typeQuery === 'string') {
txTypeFilter = parseTxTypeStrings([typeQuery]);
} else if (typeQuery) {
throw new Error(`Unexpected tx type query value: ${JSON.stringify(typeQuery)}`);
} else {
txTypeFilter = [];
}
router.get(
'/',
cacheHandler,
asyncHandler(async (req, res, next) => {
const limit = parseTxQueryLimit(req.query.limit ?? 96);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const includeUnanchored = isUnanchoredRequest(req, res, next);
const { results: txResults, total } = await db.getTxList({
offset,
limit,
txTypeFilter,
includeUnanchored,
});
// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
const results = await Bluebird.mapSeries(txResults, async tx => {
const txQuery = await getTxFromDataStore(db, { txId: tx.tx_id, dbTx: tx, includeUnanchored });
if (!txQuery.found) {
throw new Error('unexpected tx not found -- fix tx enumeration query');
const typeQuery = req.query.type;
let txTypeFilter: TransactionType[];
if (Array.isArray(typeQuery)) {
txTypeFilter = parseTxTypeStrings(typeQuery as string[]);
} else if (typeof typeQuery === 'string') {
txTypeFilter = parseTxTypeStrings([typeQuery]);
} else if (typeQuery) {
throw new Error(`Unexpected tx type query value: ${JSON.stringify(typeQuery)}`);
} else {
txTypeFilter = [];
}
return txQuery.result;
});
const response: TransactionResults = { limit, offset, total, results };
if (!isProdEnv) {
const schemaPath =
'@stacks/stacks-blockchain-api-types/api/transaction/get-transactions.schema.json';
await validate(schemaPath, response);
}
res.json(response);
});
const includeUnanchored = isUnanchoredRequest(req, res, next);
const { results: txResults, total } = await db.getTxList({
offset,
limit,
txTypeFilter,
includeUnanchored,
});
// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
const results = await Bluebird.mapSeries(txResults, async tx => {
const txQuery = await getTxFromDataStore(db, {
txId: tx.tx_id,
dbTx: tx,
includeUnanchored,
});
if (!txQuery.found) {
throw new Error('unexpected tx not found -- fix tx enumeration query');
}
return txQuery.result;
});
const response: TransactionResults = { limit, offset, total, results };
if (!isProdEnv) {
const schemaPath =
'@stacks/stacks-blockchain-api-types/api/transaction/get-transactions.schema.json';
await validate(schemaPath, response);
}
setChainTipCacheHeaders(res);
res.json(response);
})
);
router.getAsync('/multiple', async (req, res, next) => {
const txList: string[] = req.query.tx_id as string[];

View File

@@ -571,6 +571,14 @@ export interface DbTokenMetadataQueueEntry {
processed: boolean;
}
/** The canonical chain tip as seen by the API, including unanchored microblock data when present. */
export interface DbChainTip {
// Height of the latest canonical anchor block.
blockHeight: number;
// 0x-prefixed index block hash of the anchor block.
indexBlockHash: string;
// 0x-prefixed block hash of the anchor block.
blockHash: string;
// Latest canonical microblock streamed on top of the anchor block, if any.
microblockHash?: string;
// Sequence number of that microblock; set together with `microblockHash`.
microblockSequence?: number;
}
export interface DataStore extends DataStoreEventEmitter {
storeRawEventRequest(eventPath: string, payload: string): Promise<void>;
getSubdomainResolver(name: { name: string }): Promise<FoundOrNot<string>>;
@@ -591,6 +599,8 @@ export interface DataStore extends DataStoreEventEmitter {
getUnanchoredTxs(): Promise<{ txs: DbTx[] }>;
getUnanchoredChainTip(): Promise<FoundOrNot<DbChainTip>>;
getCurrentBlock(): Promise<FoundOrNot<DbBlock>>;
getCurrentBlockHeight(): Promise<FoundOrNot<number>>;
getBlocks(args: {

View File

@@ -38,6 +38,7 @@ import {
DbFungibleTokenMetadata,
DbNonFungibleTokenMetadata,
DbTokenMetadataQueueEntry,
DbChainTip,
} from './common';
import { logger, FoundOrNot } from '../helpers';
import { AddressTokenOfferingLocked, TransactionType } from '@stacks/stacks-blockchain-api-types';
@@ -215,6 +216,10 @@ export class MemoryDataStore
}
}
// Not implemented for the in-memory datastore; note this throws synchronously
// rather than returning a rejected promise.
getUnanchoredChainTip(): Promise<FoundOrNot<DbChainTip>> {
throw new Error('not yet implemented');
}
// Not implemented for the in-memory datastore; note this throws synchronously
// rather than returning a rejected promise.
getCurrentBlock(): Promise<FoundOrNot<DbBlock>> {
throw new Error('not yet implemented');
}

View File

@@ -89,6 +89,7 @@ import {
DbFungibleTokenMetadata,
DbTokenMetadataQueueEntry,
DbSearchResultWithMetadata,
DbChainTip,
} from './common';
import {
AddressTokenOfferingLocked,
@@ -2617,6 +2618,49 @@ export class PgDataStore
});
}
/**
 * Queries the current canonical chain tip: the highest canonical anchor block
 * together with the most recent canonical microblock streamed on top of it
 * (the microblock fields are left undefined when none exists).
 * @returns `found: false` when no canonical anchor block exists (e.g. an empty db).
 */
async getUnanchoredChainTip(): Promise<FoundOrNot<DbChainTip>> {
return await this.queryTx(async client => {
const result = await client.query<{
block_height: number;
index_block_hash: Buffer;
block_hash: Buffer;
microblock_hash: Buffer | null;
microblock_sequence: number | null;
}>(
`
WITH anchor_block AS (
SELECT block_height, block_hash, index_block_hash
FROM blocks
WHERE canonical = true
AND block_height = (SELECT MAX(block_height) FROM blocks)
), microblock AS (
SELECT microblock_hash, microblock_sequence
FROM microblocks, anchor_block
WHERE microblocks.parent_index_block_hash = anchor_block.index_block_hash
AND microblock_canonical = true AND canonical = true
ORDER BY microblock_sequence DESC
LIMIT 1
)
SELECT block_height, index_block_hash, block_hash, microblock_hash, microblock_sequence
FROM anchor_block LEFT JOIN microblock ON true
`
);
// The `LEFT JOIN ... ON true` above yields exactly one row when an anchor
// block exists, with null microblock columns when no canonical microblock
// is attached; zero rows means the anchor_block CTE was empty.
if (result.rowCount === 0) {
return { found: false } as const;
}
const row = result.rows[0];
// Convert hash buffers to 0x-prefixed hex strings; null microblock columns
// map to undefined on the DbChainTip.
const chainTipResult: DbChainTip = {
blockHeight: row.block_height,
indexBlockHash: bufferToHexPrefixString(row.index_block_hash),
blockHash: bufferToHexPrefixString(row.block_hash),
microblockHash:
row.microblock_hash === null ? undefined : bufferToHexPrefixString(row.microblock_hash),
microblockSequence: row.microblock_sequence === null ? undefined : row.microblock_sequence,
};
return { found: true, result: chainTipResult };
});
}
// Looks up a single block by the given `BlockIdentifier`, delegating to
// `getBlockInternal` via `this.query`.
getBlock(blockIdentifer: BlockIdentifier): Promise<FoundOrNot<DbBlock>> {
return this.query(client => this.getBlockInternal(client, blockIdentifer));
}

View File

@@ -0,0 +1,319 @@
import * as supertest from 'supertest';
import { ChainID } from '@stacks/transactions';
import { getBlockFromDataStore } from '../api/controllers/db-controller';
import { DbBlock, DbMicroblockPartial, DbTx, DbTxTypeId } from '../datastore/common';
import { startApiServer, ApiServer } from '../api/init';
import { PgDataStore, cycleMigrations, runMigrations } from '../datastore/postgres-store';
import { PoolClient } from 'pg';
import { I32_MAX } from '../helpers';
import { parseIfNoneMatchHeader } from '../api/controllers/cache-controller';
describe('cache-control tests', () => {
let db: PgDataStore;
let client: PoolClient;
let api: ApiServer;
beforeEach(async () => {
// Use a freshly-migrated database, connection, and API server for every test.
process.env.PG_DATABASE = 'postgres';
await cycleMigrations();
db = await PgDataStore.connect();
client = await db.pool.connect();
api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' });
});
test('parse if-none-match header', () => {
// Test various combinations of etags with and without weak-validation prefix, with and without
// wrapping quotes, without and without spaces after commas.
const vectors: {
input: string | undefined;
output: string[] | undefined;
}[] = [
// Absent or empty header values parse to undefined.
{ input: '""', output: undefined },
{ input: '', output: undefined },
{ input: undefined, output: undefined },
// Single strong-validation etag.
{
input: '"bfc13a64729c4290ef5b2c2730249c88ca92d82d"',
output: ['bfc13a64729c4290ef5b2c2730249c88ca92d82d'],
},
// Mixed weak/strong comma-separated list.
{ input: 'W/"67ab43", "54ed21", "7892dd"', output: ['67ab43', '54ed21', '7892dd'] },
// Trailing whitespace is tolerated.
{ input: '"fail space" ', output: ['fail space'] },
// Weak-validation (`W/`) prefix is stripped.
{ input: 'W/"5e15153d-120f"', output: ['5e15153d-120f'] },
// Comma-separated lists with and without spaces after commas.
{
input: '"<etag_value>", "<etag_value>" , "asdf"',
output: ['<etag_value>', '<etag_value>', 'asdf'],
},
{
input: '"<etag_value>","<etag_value>","asdf"',
output: ['<etag_value>', '<etag_value>', 'asdf'],
},
{
input: 'W/"<etag_value>","<etag_value>","asdf"',
output: ['<etag_value>', '<etag_value>', 'asdf'],
},
{
input: '"<etag_value>",W/"<etag_value>", W/"asdf", "abcd","123"',
output: ['<etag_value>', '<etag_value>', 'asdf', 'abcd', '123'],
},
];
expect(vectors).toBeTruthy();
for (const entry of vectors) {
const result = parseIfNoneMatchHeader(entry.input);
expect(result).toEqual(entry.output);
}
});
test('block chaintip cache control', async () => {
const addr1 = 'ST28D4Q6RCQSJ6F7TEYWQDS4N1RXYEP9YBWMYSB97';
const addr2 = 'STB44HYPYAT2BB2QE513NSP81HTMYWBJP02HPGK6';
const block1: DbBlock = {
block_hash: '0x1234',
index_block_hash: '0xdeadbeef',
parent_index_block_hash: '0x5678',
parent_block_hash: '0xff0011',
parent_microblock_hash: '',
parent_microblock_sequence: 0,
block_height: 1235,
burn_block_time: 1594647996,
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
};
await db.updateBlock(client, block1);
const tx: DbTx = {
tx_id: '0x1234',
anchor_mode: 3,
tx_index: 4,
nonce: 0,
raw_tx: Buffer.alloc(0),
index_block_hash: block1.index_block_hash,
block_hash: block1.block_hash,
block_height: 68456,
burn_block_time: 1594647995,
parent_burn_block_time: 1626122935,
type_id: DbTxTypeId.Coinbase,
coinbase_payload: Buffer.from('coinbase hi'),
status: 1,
raw_result: '0x0100000000000000000000000000000001', // u1
canonical: true,
microblock_canonical: true,
microblock_sequence: I32_MAX,
microblock_hash: '',
parent_index_block_hash: '',
parent_block_hash: '',
post_conditions: Buffer.from([0x01, 0xf5]),
fee_rate: 1234n,
sponsored: false,
sponsor_address: undefined,
sender_address: 'sender-addr',
origin_hash_mode: 1,
event_count: 0,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
};
await db.updateTx(client, tx);
const blockQuery = await getBlockFromDataStore({
blockIdentifer: { hash: block1.block_hash },
db,
});
if (!blockQuery.found) {
throw new Error('block not found');
}
const expectedResp1 = {
burn_block_time: 1594647996,
burn_block_time_iso: '2020-07-13T13:46:36.000Z',
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
canonical: true,
hash: '0x1234',
height: 1235,
parent_block_hash: '0xff0011',
parent_microblock_hash: '',
parent_microblock_sequence: 0,
txs: ['0x1234'],
microblocks_accepted: [],
microblocks_streamed: [],
execution_cost_read_count: 0,
execution_cost_read_length: 0,
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
};
expect(blockQuery.result).toEqual(expectedResp1);
const fetchBlockByHash1 = await supertest(api.server).get(
`/extended/v1/block/${block1.block_hash}`
);
expect(fetchBlockByHash1.status).toBe(200);
expect(fetchBlockByHash1.type).toBe('application/json');
expect(JSON.parse(fetchBlockByHash1.text)).toEqual(expectedResp1);
expect(fetchBlockByHash1.headers['etag']).toBe(`"${block1.index_block_hash}"`);
const fetchBlockByHashCached1 = await supertest(api.server)
.get(`/extended/v1/block/${block1.block_hash}`)
.set('If-None-Match', `"${block1.index_block_hash}"`);
expect(fetchBlockByHashCached1.status).toBe(304);
expect(fetchBlockByHashCached1.text).toBe('');
const fetchBlockByHashCacheMiss = await supertest(api.server)
.get(`/extended/v1/block/${block1.block_hash}`)
.set('If-None-Match', '"0x12345678"');
expect(fetchBlockByHashCacheMiss.status).toBe(200);
expect(fetchBlockByHashCacheMiss.type).toBe('application/json');
expect(JSON.parse(fetchBlockByHashCacheMiss.text)).toEqual(expectedResp1);
expect(fetchBlockByHashCacheMiss.headers['etag']).toBe(`"${block1.index_block_hash}"`);
const fetchBlockByHeight = await supertest(api.server).get(
`/extended/v1/block/by_height/${block1.block_height}`
);
expect(fetchBlockByHeight.status).toBe(200);
expect(fetchBlockByHeight.type).toBe('application/json');
expect(JSON.parse(fetchBlockByHeight.text)).toEqual(expectedResp1);
expect(fetchBlockByHeight.headers['etag']).toBe(`"${block1.index_block_hash}"`);
const fetchBlockByHeightCached = await supertest(api.server)
.get(`/extended/v1/block/by_height/${block1.block_height}`)
.set('If-None-Match', `"${block1.index_block_hash}"`);
expect(fetchBlockByHeightCached.status).toBe(304);
expect(fetchBlockByHeightCached.text).toBe('');
const fetchBlockByHeightCacheMiss = await supertest(api.server)
.get(`/extended/v1/block/by_height/${block1.block_height}`)
.set('If-None-Match', '"0x12345678"');
expect(fetchBlockByHashCacheMiss.status).toBe(200);
expect(fetchBlockByHeightCacheMiss.type).toBe('application/json');
expect(JSON.parse(fetchBlockByHeightCacheMiss.text)).toEqual(expectedResp1);
expect(fetchBlockByHeightCacheMiss.headers['etag']).toBe(`"${block1.index_block_hash}"`);
// Microblock fixture streamed on top of block1 — once ingested it becomes
// the new unanchored chain tip.
const mb1: DbMicroblockPartial = {
microblock_hash: '0xff01',
microblock_sequence: 0,
microblock_parent_hash: block1.block_hash,
parent_index_block_hash: block1.index_block_hash,
parent_burn_block_height: 123,
parent_burn_block_hash: '0xaa',
parent_burn_block_time: 1626122935,
};
// Token-transfer tx carried inside mb1. Anchor-block fields use sentinel
// values because they aren't known until a later anchor block accepts the
// microblock (see comments inline below).
const mbTx1: DbTx = {
tx_id: '0x02',
tx_index: 0,
anchor_mode: 3,
nonce: 0,
raw_tx: Buffer.alloc(0),
type_id: DbTxTypeId.TokenTransfer,
status: 1,
raw_result: '0x0100000000000000000000000000000001', // u1
canonical: true,
post_conditions: Buffer.from([0x01, 0xf5]),
fee_rate: 1234n,
sponsored: false,
sender_address: addr1,
sponsor_address: undefined,
origin_hash_mode: 1,
token_transfer_amount: 50n,
token_transfer_memo: Buffer.from('hi'),
token_transfer_recipient_address: addr2,
event_count: 1,
parent_index_block_hash: block1.index_block_hash,
parent_block_hash: block1.block_hash,
microblock_canonical: true,
microblock_sequence: mb1.microblock_sequence,
microblock_hash: mb1.microblock_hash,
parent_burn_block_time: mb1.parent_burn_block_time,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
// These properties aren't known until the next anchor block that accepts this microblock.
index_block_hash: '',
block_hash: '',
burn_block_time: -1,
// These properties can be determined with a db query, they are set while the db is inserting them.
block_height: -1,
};
// Ingest the microblock and its tx into the db (no events/contracts/names
// attached to this tx fixture).
await db.updateMicroblocks({
microblocks: [mb1],
txs: [
{
tx: mbTx1,
stxLockEvents: [],
stxEvents: [],
ftEvents: [],
nftEvents: [],
contractLogEvents: [],
smartContracts: [],
names: [],
namespaces: [],
},
],
});
// The unanchored chain tip should still point at block1 for the anchor-block
// fields, but now also report mb1's hash/sequence as the microblock tip.
const chainTip2 = await db.getUnanchoredChainTip();
expect(chainTip2.found).toBeTruthy();
expect(chainTip2.result?.blockHash).toBe(block1.block_hash);
expect(chainTip2.result?.blockHeight).toBe(block1.block_height);
expect(chainTip2.result?.indexBlockHash).toBe(block1.index_block_hash);
expect(chainTip2.result?.microblockHash).toBe(mb1.microblock_hash);
expect(chainTip2.result?.microblockSequence).toBe(mb1.microblock_sequence);
// Expected block payload after the microblock is streamed: identical anchor
// block, but `microblocks_streamed` now lists mb1's hash.
const expectedResp2 = {
burn_block_time: 1594647996,
burn_block_time_iso: '2020-07-13T13:46:36.000Z',
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
canonical: true,
hash: '0x1234',
height: 1235,
parent_block_hash: '0xff0011',
parent_microblock_hash: '',
parent_microblock_sequence: 0,
txs: ['0x1234'],
microblocks_accepted: [],
microblocks_streamed: ['0xff01'],
execution_cost_read_count: 0,
execution_cost_read_length: 0,
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
};
// With a microblock at the tip, the route's ETag should switch from the
// anchor block's index_block_hash to the microblock hash — proving the
// cache key tracks the unanchored chain tip, not just the anchor block.
const fetchBlockByHash2 = await supertest(api.server).get(
`/extended/v1/block/${block1.block_hash}`
);
expect(fetchBlockByHash2.status).toBe(200);
expect(fetchBlockByHash2.type).toBe('application/json');
expect(JSON.parse(fetchBlockByHash2.text)).toEqual(expectedResp2);
expect(fetchBlockByHash2.headers['etag']).toBe(`"${mb1.microblock_hash}"`);
// Conditional request against the new microblock-based ETag gets a 304.
const fetchBlockByHashCached2 = await supertest(api.server)
.get(`/extended/v1/block/${block1.block_hash}`)
.set('If-None-Match', `"${mb1.microblock_hash}"`);
expect(fetchBlockByHashCached2.status).toBe(304);
expect(fetchBlockByHashCached2.text).toBe('');
});
// Per-test teardown: stop the API server first, then release the pg client,
// close the db, and finally roll the migrations back so each test starts
// from a clean schema.
afterEach(async () => {
await api.terminate();
client.release();
await db?.close();
await runMigrations(undefined, 'down');
});
});

View File

@@ -323,6 +323,14 @@ describe('microblock tests', () => {
],
});
const chainTip1 = await db.getUnanchoredChainTip();
expect(chainTip1.found).toBeTruthy();
expect(chainTip1.result?.blockHash).toBe(block1.block_hash);
expect(chainTip1.result?.blockHeight).toBe(block1.block_height);
expect(chainTip1.result?.indexBlockHash).toBe(block1.index_block_hash);
expect(chainTip1.result?.microblockHash).toBeUndefined();
expect(chainTip1.result?.microblockSequence).toBeUndefined();
const mb1: DbMicroblockPartial = {
microblock_hash: '0xff01',
microblock_sequence: 0,
@@ -411,6 +419,14 @@ describe('microblock tests', () => {
],
});
const chainTip2 = await db.getUnanchoredChainTip();
expect(chainTip2.found).toBeTruthy();
expect(chainTip2.result?.blockHash).toBe(block1.block_hash);
expect(chainTip2.result?.blockHeight).toBe(block1.block_height);
expect(chainTip2.result?.indexBlockHash).toBe(block1.index_block_hash);
expect(chainTip2.result?.microblockHash).toBe(mb1.microblock_hash);
expect(chainTip2.result?.microblockSequence).toBe(mb1.microblock_sequence);
const txListResult1 = await supertest(api.server).get(`/extended/v1/tx`);
const { body: txListBody1 }: { body: TransactionResults } = txListResult1;
expect(txListBody1.results).toHaveLength(1);