feat: GET /extended/v1/burn_block (#1766)

* feat: new endpoint to get burn blocks `/extended/v1/burn_block`

* feat: return stacks block hashes in burn block results

* test: add tests for `/extended/v1/burn_block`

* ci: fix openapi lint

Author: Matthew Little
Date: 2023-12-12 17:12:28 +01:00
Committed by: GitHub
Parent: 7c45f53622
Commit: cb38b6811c
14 changed files with 500 additions and 1 deletion


@@ -0,0 +1,19 @@
{
"limit": 1,
"offset": 0,
"total": 21707,
"results": [
{
"burn_block_time": 1626281749,
"burn_block_time_iso": "2021-07-14T16:55:49.000Z",
"burn_block_hash": "0x0000000000000000000ea16f8e906e85ee1cb4dff1e5424e93843b3cec8b0bcb",
"burn_block_height": 691014,
"stacks_blocks": [
"0x54647c277eefe60519b407f2c897749005fdb7f831034135063b2ee43fdacb04",
"0xdaf61d2b355f35c94cf019af99aeb73d8e7db7301c7cd693a464ebd1cfc2228c",
"0xb9e9b308cf9621ecbf66ca7b4689fe384b9b67c4588ec827d8163ab602fb935e",
"0x754562cba6ec243f90485e97778ab472f462fd123ef5b83cc79d8759ca8875f5"
]
}
]
}
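
A minimal client sketch that would produce the page above (the base URL is a placeholder and the snippet assumes an async/ES-module context; only the endpoint path and the `limit`/`offset` params are taken from this change):

const res = await fetch('https://api.example.com/extended/v1/burn_block?limit=1&offset=0');
const page = await res.json();
// e.g. total burn block count and the hash of the most recent burn block
console.log(page.total, page.results[0].burn_block_hash);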


@@ -0,0 +1,29 @@
{
"description": "GET request that returns burn blocks",
"additionalProperties": false,
"title": "BurnBlockListResponse",
"type": "object",
"required": ["results", "limit", "offset", "total"],
"properties": {
"limit": {
"type": "integer",
"maximum": 30,
"description": "The number of burn blocks to return"
},
"offset": {
"type": "integer",
"description": "The number to burn blocks to skip (starting at `0`)",
"default": 0
},
"total": {
"type": "integer",
"description": "The number of burn blocks available (regardless of filter parameters)"
},
"results": {
"type": "array",
"items": {
"$ref": "../../entities/blocks/burn-block.schema.json"
}
}
}
}


@@ -0,0 +1,12 @@
{
"burn_block_time": 1594233639,
"burn_block_time_iso": "2020-08-27T16:41:26.000Z",
"burn_block_hash": "0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2",
"burn_block_height": 654439,
"stacks_blocks": [
"0x54647c277eefe60519b407f2c897749005fdb7f831034135063b2ee43fdacb04",
"0xdaf61d2b355f35c94cf019af99aeb73d8e7db7301c7cd693a464ebd1cfc2228c",
"0xb9e9b308cf9621ecbf66ca7b4689fe384b9b67c4588ec827d8163ab602fb935e",
"0x754562cba6ec243f90485e97778ab472f462fd123ef5b83cc79d8759ca8875f5"
]
}


@@ -0,0 +1,38 @@
{
"title": "BurnBlock",
"description": "A burn block",
"type": "object",
"additionalProperties": false,
"required": [
"burn_block_time",
"burn_block_time_iso",
"burn_block_hash",
"burn_block_height",
"stacks_blocks"
],
"properties": {
"burn_block_time": {
"type": "number",
"description": "Unix timestamp (in seconds) indicating when this block was mined."
},
"burn_block_time_iso": {
"type": "string",
"description": "An ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) indicating when this block was mined."
},
"burn_block_hash": {
"type": "string",
"description": "Hash of the anchor chain block"
},
"burn_block_height": {
"type": "integer",
"description": "Height of the anchor chain block"
},
"stacks_blocks": {
"type": "array",
"items": {
"type": "string"
},
"description": "Hashes of the Stacks blocks included in the burn block"
}
}
}
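
A runtime guard mirroring this schema could look like the sketch below (the helper name is hypothetical; the API itself validates against the JSON Schema directly):

import { BurnBlock } from '@stacks/stacks-blockchain-api-types';

// Returns true when `v` has the shape required by the BurnBlock schema above.
function isBurnBlock(v: unknown): v is BurnBlock {
  if (typeof v !== 'object' || v === null) return false;
  const o = v as Record<string, unknown>;
  return (
    typeof o.burn_block_time === 'number' &&
    typeof o.burn_block_time_iso === 'string' &&
    typeof o.burn_block_hash === 'string' &&
    typeof o.burn_block_height === 'number' &&
    Array.isArray(o.stacks_blocks) &&
    o.stacks_blocks.every(h => typeof h === 'string')
  );
}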

docs/generated.d.ts (vendored)

@@ -13,6 +13,7 @@ export type SchemaMergeRootStub =
| AddressTransactionsWithTransfersListResponse
| AddressTransactionsListResponse
| BlockListResponse
| BurnBlockListResponse
| BnsError
| BnsFetchFileZoneResponse
| BnsGetAllNamesResponse
@@ -114,6 +115,7 @@ export type SchemaMergeRootStub =
| NftBalance
| StxBalance
| Block
| BurnBlock
| BurnchainRewardSlotHolder
| BurnchainReward
| BurnchainRewardsTotal
@@ -1274,6 +1276,49 @@ export interface Block {
[k: string]: number | undefined;
};
}
/**
* GET request that returns burn blocks
*/
export interface BurnBlockListResponse {
/**
* The number of burn blocks to return
*/
limit: number;
/**
* The number of burn blocks to skip (starting at `0`)
*/
offset: number;
/**
* The number of burn blocks available (regardless of filter parameters)
*/
total: number;
results: BurnBlock[];
}
/**
* A burn block
*/
export interface BurnBlock {
/**
* Unix timestamp (in seconds) indicating when this block was mined.
*/
burn_block_time: number;
/**
* An ISO 8601 timestamp (YYYY-MM-DDTHH:mm:ss.sssZ) indicating when this block was mined.
*/
burn_block_time_iso: string;
/**
* Hash of the anchor chain block
*/
burn_block_hash: string;
/**
* Height of the anchor chain block
*/
burn_block_height: number;
/**
* Hashes of the Stacks blocks included in the burn block
*/
stacks_blocks: string[];
}
/**
* Error
*/
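
Annotating a parsed response with these generated types gives compile-time checks on field access; a hedged sketch (the helper name is illustrative, and `body` is assumed to be a parsed response from the new endpoint):

import { BurnBlock, BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';

function newestStacksBlockHash(body: BurnBlockListResponse): string | undefined {
  // Results are ordered newest-first, as are the Stacks block hashes within
  // each burn block (see the ORDER BY clauses in pg-store below).
  const newest: BurnBlock | undefined = body.results[0];
  return newest?.stacks_blocks[0];
}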


@@ -612,6 +612,54 @@ paths:
schema:
$ref: ./api/microblocks/get-unanchored-txs.schema.json
/extended/v1/burn_block:
get:
summary: Get recent burn blocks
description: |
Retrieves a list of recent burn blocks
tags:
- Blocks
operationId: get_burn_block_list
parameters:
- name: limit
in: query
description: max number of burn blocks to fetch
required: false
schema:
type: integer
default: 20
maximum: 30
- name: offset
in: query
description: index of first burn block to fetch
required: false
schema:
type: integer
example: 42000
- name: height
in: query
description: filter by burn block height
required: false
schema:
type: integer
example: 42000
- name: hash
in: query
description: filter by burn block hash, or use the constant 'latest' to select the most recent burn block
required: false
schema:
type: string
example: "0x4839a8b01cfb39ffcc0d07d3db31e848d5adf5279d529ed5062300b9f353ff79"
responses:
200:
description: List of burn blocks
content:
application/json:
schema:
$ref: ./api/blocks/get-burn-blocks.schema.json
example:
$ref: ./api/blocks/get-burn-blocks.example.json
/extended/v1/block:
get:
summary: Get recent blocks
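
Given the parameters documented above for /extended/v1/burn_block (`limit` capped at 30), a client walking the full burn block history pages with `offset` until `total` is reached; a sketch under those documented parameters (function name and base URL are illustrative):

import { BurnBlock, BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';

async function fetchAllBurnBlocks(baseUrl: string): Promise<BurnBlock[]> {
  const pageSize = 30; // documented maximum for the `limit` query param
  const blocks: BurnBlock[] = [];
  let offset = 0;
  for (;;) {
    const res = await fetch(
      `${baseUrl}/extended/v1/burn_block?limit=${pageSize}&offset=${offset}`
    );
    const page = (await res.json()) as BurnBlockListResponse;
    blocks.push(...page.results);
    offset += page.results.length;
    // Stop once every available burn block has been fetched.
    if (page.results.length === 0 || offset >= page.total) break;
  }
  return blocks;
}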


@@ -16,6 +16,7 @@ import {
AbstractTransaction,
BaseTransaction,
Block,
BurnBlock,
CoinbaseTransactionMetadata,
ContractCallTransactionMetadata,
MempoolTransaction,
@@ -530,6 +531,35 @@ export async function getMicroblockFromDataStore({
};
}
export async function getBurnBlocksFromDataStore(args: {
db: PgStore;
limit: number;
offset: number;
height: number | null;
hash: 'latest' | string | null;
}): Promise<{ total: number; results: BurnBlock[] }> {
const query = await args.db.getBurnBlocks({
limit: args.limit,
offset: args.offset,
height: args.height,
hash: args.hash,
});
const results = query.results.map(r => {
const burnBlock: BurnBlock = {
burn_block_time: r.burn_block_time,
burn_block_time_iso: unixEpochToIso(r.burn_block_time),
burn_block_hash: r.burn_block_hash,
burn_block_height: r.burn_block_height,
stacks_blocks: r.stacks_blocks,
};
return burnBlock;
});
return {
total: query.total,
results,
};
}
export async function getMicroblocksFromDataStore(args: {
db: PgStore;
limit: number;
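
The unixEpochToIso helper that getBurnBlocksFromDataStore imports from ../helpers is not part of this diff; a plausible implementation, shown here only as an assumption, is:

// Assumed shape of the helper: convert Unix seconds to the ISO 8601
// (YYYY-MM-DDTHH:mm:ss.sssZ) form used for `burn_block_time_iso`.
function unixEpochToIso(timestamp: number): string {
  return new Date(timestamp * 1000).toISOString();
}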


@@ -45,6 +45,7 @@ import { createPox3EventsRouter } from './routes/pox3';
import { createStackingRouter } from './routes/stacking';
import { logger, loggerMiddleware } from '../logger';
import { SERVER_VERSION, isPgConnectionError, isProdEnv, waiter } from '@hirosystems/api-toolkit';
import { createBurnBlockRouter } from './routes/burn-block';
export interface ApiServer {
expressApp: express.Express;
@@ -184,6 +185,7 @@ export async function startApiServer(opts: {
router.use('/tx', createTxRouter(datastore));
router.use('/block', createBlockRouter(datastore));
router.use('/microblock', createMicroblockRouter(datastore));
router.use('/burn_block', createBurnBlockRouter(datastore));
router.use('/burnchain', createBurnchainRouter(datastore));
router.use('/contract', createContractRouter(datastore));
// same here, exclude account nonce route


@@ -35,6 +35,7 @@ export enum ResourceType {
Token,
Pox2Event,
Stacker,
BurnBlock,
}
const pagingQueryLimits: Record<ResourceType, { defaultLimit: number; maxLimit: number }> = {
@@ -42,6 +43,10 @@ const pagingQueryLimits: Record<ResourceType, { defaultLimit: number; maxLimit:
defaultLimit: 20,
maxLimit: 30,
},
[ResourceType.BurnBlock]: {
defaultLimit: 20,
maxLimit: 30,
},
[ResourceType.Tx]: {
defaultLimit: 20,
maxLimit: 50,
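
For reference, a hedged sketch of how the pagingQueryLimits table might be consumed by getPagingQueryLimit (the actual implementation, used by the burn block router below, may reject rather than clamp out-of-range values):

function getPagingQueryLimitSketch(resource: ResourceType, limitQuery?: unknown): number {
  const { defaultLimit, maxLimit } = pagingQueryLimits[resource];
  if (limitQuery === undefined || limitQuery === null) return defaultLimit; // no param given
  const limit = Number(limitQuery);
  if (!Number.isInteger(limit) || limit < 0) {
    throw new Error(`Invalid limit: ${String(limitQuery)}`);
  }
  return Math.min(limit, maxLimit); // never exceed the per-resource maximum
}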


@@ -111,6 +111,53 @@ export function getBlockParams(
}
}
/**
* Parses a block hash value from a given request query param.
* If an error is encountered while parsing the param then a 400 response with an error message is sent and the function throws.
* @param queryParamName - name of the query param
* @param paramRequired - if true then the function will throw and return a 400 if the param is missing, if false then the function will return null if the param is missing
*/
export function getBlockHashQueryParam<TRequired extends boolean>(
queryParamName: string,
paramRequired: TRequired,
req: Request,
res: Response,
next: NextFunction
): TRequired extends true ? string | never : string | null {
if (!(queryParamName in req.query)) {
if (paramRequired) {
handleBadRequest(
res,
next,
`Request is missing required "${queryParamName}" query parameter`
);
} else {
return null as TRequired extends true ? string : string | null;
}
}
const hashParamVal = req.query[queryParamName];
if (typeof hashParamVal !== 'string') {
handleBadRequest(
res,
next,
`Unexpected type for block hash query parameter: ${JSON.stringify(hashParamVal)}`
);
}
// Extract the hash part, ignoring '0x' if present
const match = hashParamVal.match(/^(0x)?([a-fA-F0-9]{64})$/i);
if (!match) {
handleBadRequest(
res,
next,
"Invalid hash string. Ensure it is 64 hexadecimal characters long, with an optional '0x' prefix"
);
}
// Normalize the string
return '0x' + match[2].toLowerCase();
}
/**
* Parses a block height value from a given request query param.
* If an error is encountered while parsing the param then a 400 response with an error message is sent and the function throws.
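
A standalone illustration of the normalization performed by getBlockHashQueryParam above (the helper name here is hypothetical): a bare hash and a '0x'-prefixed hash, in any letter case, reduce to the same lowercase canonical form.

const normalizeBlockHash = (input: string): string | null => {
  const match = input.match(/^(0x)?([a-fA-F0-9]{64})$/i);
  return match ? '0x' + match[2].toLowerCase() : null;
};
// normalizeBlockHash('B154C008DF2101023A6D0D54986B3964CEE58119EED14F5BED98E15678E18FE2')
//   === normalizeBlockHash('0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2')
//   === '0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2'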


@@ -0,0 +1,42 @@
import * as express from 'express';
import { BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';
import { getBurnBlocksFromDataStore } from '../controllers/db-controller';
import { getPagingQueryLimit, parsePagingQueryInput, ResourceType } from '../pagination';
import { getBlockHashQueryParam, getBlockHeightQueryParam } from '../query-helpers';
import { getETagCacheHandler, setETagCacheHeaders } from '../controllers/cache-controller';
import { asyncHandler } from '../async-handler';
import { PgStore } from '../../datastore/pg-store';
export function createBurnBlockRouter(db: PgStore): express.Router {
const router = express.Router();
const cacheHandler = getETagCacheHandler(db);
router.get(
'/',
cacheHandler,
asyncHandler(async (req, res, next) => {
const limit = getPagingQueryLimit(ResourceType.BurnBlock, req.query.limit);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const height =
getBlockHeightQueryParam('height', false, req, res, next) ??
getBlockHeightQueryParam('block_height', false, req, res, next);
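// 'latest' is a special constant accepted as-is; any other value is
// validated and normalized as a block hash below.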
let hash = req.query.hash === 'latest' ? 'latest' : null;
if (!hash) {
hash = getBlockHashQueryParam('hash', false, req, res, next);
}
const { results, total } = await getBurnBlocksFromDataStore({
offset,
limit,
db,
height,
hash,
});
setETagCacheHeaders(res);
const response: BurnBlockListResponse = { limit, offset, total, results };
res.json(response);
})
);
return router;
}


@@ -46,6 +46,14 @@ export interface DbMicroblock extends DbMicroblockPartial {
block_hash: string;
}
export interface DbBurnBlock {
block_hash: string;
burn_block_time: number;
burn_block_hash: string;
burn_block_height: number;
stacks_blocks: string[];
}
export interface DbBurnchainReward {
canonical: boolean;
burn_block_hash: string;


@@ -29,6 +29,7 @@ import {
DbBnsNamespace,
DbBnsSubdomain,
DbBnsZoneFile,
DbBurnBlock,
DbBurnchainReward,
DbChainTip,
DbEvent,
@@ -386,6 +387,56 @@ export class PgStore extends BasePgStore {
return { found: true, result: block } as const;
}
async getBurnBlocks({
limit,
offset,
height,
hash,
}: {
limit: number;
offset: number;
height: number | null;
hash: 'latest' | string | null;
}): Promise<{ results: DbBurnBlock[]; total: number }> {
return await this.sqlTransaction(async sql => {
const countQuery = await sql<{ burn_block_height: number; count: number }[]>`
SELECT burn_block_height, block_count AS count FROM chain_tip
`;
const heightFilter = height ? sql`AND burn_block_height = ${height}` : sql``;
const hashFilter =
hash === 'latest'
? sql`AND burn_block_height = ${countQuery[0].burn_block_height}`
: hash
? sql`AND burn_block_hash = ${hash}`
: sql``;
const block_count = countQuery[0].count;
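// DISTINCT ON keeps one row per burn_block_height (the first row under the
// ORDER BY below, i.e. the row for the highest Stacks block), while the
// window ARRAY_AGG attaches the hashes of all canonical Stacks blocks
// anchored to that burn block, newest first.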
const blocksQuery = await sql<DbBurnBlock[]>`
SELECT DISTINCT ON (burn_block_height)
burn_block_time,
burn_block_hash,
burn_block_height,
ARRAY_AGG(block_hash) OVER (
PARTITION BY burn_block_height
ORDER BY block_height DESC
ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
) AS stacks_blocks
FROM blocks
WHERE canonical = true
${heightFilter}
${hashFilter}
ORDER BY burn_block_height DESC, block_height DESC
LIMIT ${limit}
OFFSET ${offset}
`;
const blocks = blocksQuery.map(r => r); // materialize the row list as a plain array
return {
results: blocks,
total: block_count,
};
});
}
/**
* Returns Block information with metadata, including accepted and streamed microblocks hash
* @returns `BlocksWithMetadata` object including list of Blocks with metadata and total count.


@@ -9,7 +9,7 @@ import {
DataStoreBlockUpdateData,
} from '../datastore/common';
import { startApiServer, ApiServer } from '../api/init';
import { I32_MAX } from '../helpers';
import { I32_MAX, unixEpochToIso } from '../helpers';
import { TestBlockBuilder, TestMicroblockStreamBuilder } from '../test-utils/test-builders';
import { PgWriteStore } from '../datastore/pg-write-store';
import { PgSqlClient, bufferToHex } from '@hirosystems/api-toolkit';
@@ -276,6 +276,129 @@ describe('block tests', () => {
expect(result.body).toEqual(expectedResp);
});
test('/burn_block', async () => {
const burnBlock1 = {
burn_block_hash: '0x5678111111111111111111111111111111111111111111111111111111111111',
burn_block_height: 5,
burn_block_time: 1702386592,
};
const burnBlock2 = {
burn_block_hash: '0x5678211111111111111111111111111111111111111111111111111111111111',
burn_block_height: 7,
burn_block_time: 1702386678,
};
const stacksBlock1 = {
block_height: 1,
block_hash: '0x1234111111111111111111111111111111111111111111111111111111111111',
index_block_hash: '0xabcd111111111111111111111111111111111111111111111111111111111111',
parent_index_block_hash: '0x0000000000000000000000000000000000000000000000000000000000000000',
burn_block_hash: burnBlock1.burn_block_hash,
burn_block_height: burnBlock1.burn_block_height,
burn_block_time: burnBlock1.burn_block_time,
};
const stacksBlock2 = {
block_height: 2,
block_hash: '0x1234211111111111111111111111111111111111111111111111111111111111',
index_block_hash: '0xabcd211111111111111111111111111111111111111111111111111111111111',
parent_index_block_hash: stacksBlock1.index_block_hash,
burn_block_hash: burnBlock2.burn_block_hash,
burn_block_height: burnBlock2.burn_block_height,
burn_block_time: burnBlock2.burn_block_time,
};
const stacksBlock3 = {
block_height: 3,
block_hash: '0x1234311111111111111111111111111111111111111111111111111111111111',
index_block_hash: '0xabcd311111111111111111111111111111111111111111111111111111111111',
parent_index_block_hash: stacksBlock2.index_block_hash,
burn_block_hash: burnBlock2.burn_block_hash,
burn_block_height: burnBlock2.burn_block_height,
burn_block_time: burnBlock2.burn_block_time,
};
const stacksBlock4 = {
block_height: 4,
block_hash: '0x1234411111111111111111111111111111111111111111111111111111111111',
index_block_hash: '0xabcd411111111111111111111111111111111111111111111111111111111111',
parent_index_block_hash: stacksBlock3.index_block_hash,
burn_block_hash: burnBlock2.burn_block_hash,
burn_block_height: burnBlock2.burn_block_height,
burn_block_time: burnBlock2.burn_block_time,
};
const stacksBlocks = [stacksBlock1, stacksBlock2, stacksBlock3, stacksBlock4];
for (const block of stacksBlocks) {
const dbBlock = new TestBlockBuilder({
block_hash: block.block_hash,
index_block_hash: block.index_block_hash,
parent_index_block_hash: block.parent_index_block_hash,
block_height: block.block_height,
burn_block_hash: block.burn_block_hash,
burn_block_height: block.burn_block_height,
burn_block_time: block.burn_block_time,
}).build();
await db.update(dbBlock);
}
const result = await supertest(api.server).get(`/extended/v1/burn_block/`);
expect(result.body.results).toEqual([
{
burn_block_hash: burnBlock2.burn_block_hash,
burn_block_height: burnBlock2.burn_block_height,
burn_block_time: burnBlock2.burn_block_time,
burn_block_time_iso: unixEpochToIso(burnBlock2.burn_block_time),
stacks_blocks: [stacksBlock4.block_hash, stacksBlock3.block_hash, stacksBlock2.block_hash],
},
{
burn_block_hash: burnBlock1.burn_block_hash,
burn_block_height: burnBlock1.burn_block_height,
burn_block_time: burnBlock1.burn_block_time,
burn_block_time_iso: unixEpochToIso(burnBlock1.burn_block_time),
stacks_blocks: [stacksBlock1.block_hash],
},
]);
// test 'latest' filter
const result2 = await supertest(api.server).get(`/extended/v1/burn_block?hash=latest`);
expect(result2.body.results).toEqual([
{
burn_block_hash: stacksBlocks.at(-1)?.burn_block_hash,
burn_block_height: stacksBlocks.at(-1)?.burn_block_height,
burn_block_time: stacksBlocks.at(-1)?.burn_block_time,
burn_block_time_iso: unixEpochToIso(stacksBlocks.at(-1)?.burn_block_time ?? 0),
stacks_blocks: [stacksBlock4.block_hash, stacksBlock3.block_hash, stacksBlock2.block_hash],
},
]);
// test hash filter
const result3 = await supertest(api.server).get(
`/extended/v1/burn_block?hash=${stacksBlock1.burn_block_hash}`
);
expect(result3.body.results).toEqual([
{
burn_block_hash: stacksBlock1.burn_block_hash,
burn_block_height: stacksBlock1.burn_block_height,
burn_block_time: stacksBlock1.burn_block_time,
burn_block_time_iso: unixEpochToIso(stacksBlock1.burn_block_time),
stacks_blocks: [stacksBlock1.block_hash],
},
]);
// test height filter
const result4 = await supertest(api.server).get(
`/extended/v1/burn_block?height=${stacksBlock1.burn_block_height}`
);
expect(result4.body.results).toEqual([
{
burn_block_hash: stacksBlock1.burn_block_hash,
burn_block_height: stacksBlock1.burn_block_height,
burn_block_time: stacksBlock1.burn_block_time,
burn_block_time_iso: unixEpochToIso(stacksBlock1.burn_block_time),
stacks_blocks: [stacksBlock1.block_hash],
},
]);
});
test('block tx list excludes non-canonical', async () => {
const block1 = new TestBlockBuilder({ block_hash: '0x0001', index_block_hash: '0x0001' })
.addTx({ tx_id: '0x0001' })