Mirror of https://github.com/alexgo-io/stacks-blockchain-api.git
Synced 2026-01-12 08:34:40 +08:00
feat: add /extended/v2/blocks endpoint with burn block filters (#1769)
* chore: add typebox
* feat: add blocks v2 endpoint
* chore: add types
* fix: v2 query
* fix: tests
* chore: tweaks
* docs: openapi
* fix: exports
* fix: tests
* fix: tokens router
* fix: adjust burn block hash regex
docs/api/blocks/get-nakamoto-blocks.example.json (new file, 26 lines)
@@ -0,0 +1,26 @@
{
  "limit": 1,
  "offset": 0,
  "total": 21707,
  "results": [
    {
      "canonical": true,
      "height": 21698,
      "hash": "0x9be3e38eab9c7d094fd51792383c66706838d6392e95bc05cc730b8f7520e352",
      "parent_block_hash": "0x76ee36d1d6c88e56b5c0e80f0d7bc7d3492141faf1b900efb19fcd00457d4654",
      "burn_block_time": 1626281749,
      "burn_block_time_iso": "2021-07-14T16:55:49.000Z",
      "burn_block_hash": "0x0000000000000000000ea16f8e906e85ee1cb4dff1e5424e93843b3cec8b0bcb",
      "burn_block_height": 691014,
      "miner_txid": "0x118f7122a69441d13e6a3dfd4c3b0f9950be25195bb8126aae7fadea1aa9185d",
      "txs": [
        "0x76f58b2eaff65a07a5971b241c4e71fee92ee0f9396809f911f90839f9004cac",
        "0x32972d9052b068f218f6e13451f7aff937099b74bbf95fac7d9402295b1b3941",
        "0x8cd30724c02a9cc1d8879a34dc136ebfdb2008420badcfb5947b92f85ebce79b",
        "0xf5c1577f42d3753a508101e045dd2dc60491eb0aa552e0ecd0ad37cc697143f4",
        "0x35e4c20e2838f999e0cf0b40c5fabce154c2df1912a1074150d26784c53f7a20",
        "0x501eb42b82e5b7a7350b47fa143cd4e90bb46d43e4a7d22830b2bf2aa70b7922"
      ]
    }
  ]
}
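For orientation, a minimal TypeScript sketch of paging through the new endpoint using the `limit`, `offset`, and `total` fields of the response envelope above; the API host is a placeholder and error handling is reduced for illustration.

```typescript
// Sketch only: walk every page of /extended/v2/blocks and collect block hashes.
// The base URL is an assumption; substitute any Stacks Blockchain API deployment.
const API_BASE = 'https://api.example.com';

async function fetchAllBlockHashes(): Promise<string[]> {
  const hashes: string[] = [];
  let offset = 0;
  const limit = 30; // documented maximum page size
  while (true) {
    const res = await fetch(`${API_BASE}/extended/v2/blocks?limit=${limit}&offset=${offset}`);
    if (!res.ok) throw new Error(`request failed: ${res.status}`);
    const page = await res.json();
    hashes.push(...page.results.map((b: { hash: string }) => b.hash));
    offset += page.results.length;
    if (page.results.length === 0 || offset >= page.total) break;
  }
  return hashes;
}
```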
docs/api/blocks/get-nakamoto-blocks.schema.json (new file, 29 lines)
@@ -0,0 +1,29 @@
{
  "description": "GET request that returns blocks",
  "additionalProperties": false,
  "title": "NakamotoBlockListResponse",
  "type": "object",
  "required": ["results", "limit", "offset", "total"],
  "properties": {
    "limit": {
      "type": "integer",
      "maximum": 30,
      "description": "The number of blocks to return"
    },
    "offset": {
      "type": "integer",
      "description": "The number of blocks to skip (starting at `0`)",
      "default": 0
    },
    "total": {
      "type": "integer",
      "description": "The number of blocks available"
    },
    "results": {
      "type": "array",
      "items": {
        "$ref": "../../entities/blocks/nakamoto-block.schema.json"
      }
    }
  }
}
docs/entities/blocks/nakamoto-block.example.json (new file, 23 lines)
@@ -0,0 +1,23 @@
{
  "canonical": true,
  "height": 3275,
  "hash": "0xe77ba8cf6bb7c0e4f64adc83356289ed467d31a22354907b4bb814590058430f",
  "index_block_hash": "0x918697ef63f9d8bdf844c3312b299e72a231cde542f3173f7755bb8c1cdaf3a7",
  "parent_block_hash": "0x75ab21ef25cbff2caa14c27d830ed7886a4d1522e1b6f9e5dc3b59ccf73ed49f",
  "parent_index_block_hash": "0x4262db117659d1ca9406970c8f44ffd3d8f11f8e18c591d2e3960f4070107756",
  "burn_block_time": 1594233639,
  "burn_block_time_iso": "2020-08-27T16:41:26.000Z",
  "burn_block_hash": "0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2",
  "burn_block_height": 654439,
  "miner_txid": "0xd7d56070277ccd87b42acf0c91f915dd181f9db4cf878a4e95518bc397c240cc",
  "txs": [
    "0x4262db117659d1ca9406970c8f44ffd3d8f11f8e18c591d2e3960f4070107754",
    "0x383632cd3b5464dffb684082750fcfaddd1f52625bbb9f884ed8f45d2b1f0547",
    "0xc99fe597e44b8bd15a50eec660c6e679a7144a5a8553d214b9d5f1406d278c22"
  ],
  "execution_cost_read_count": 2477,
  "execution_cost_read_length": 1659409,
  "execution_cost_runtime": 2520952000,
  "execution_cost_write_count": 608,
  "execution_cost_write_length": 80170
}
docs/entities/blocks/nakamoto-block.schema.json (new file, 99 lines)
@@ -0,0 +1,99 @@
{
  "title": "NakamotoBlock",
  "description": "A block",
  "type": "object",
  "additionalProperties": false,
  "required": [
    "canonical",
    "height",
    "hash",
    "index_block_hash",
    "parent_block_hash",
    "parent_index_block_hash",
    "txs",
    "burn_block_time",
    "burn_block_time_iso",
    "burn_block_hash",
    "burn_block_height",
    "miner_txid",
    "execution_cost_read_count",
    "execution_cost_read_length",
    "execution_cost_runtime",
    "execution_cost_write_count",
    "execution_cost_write_length"
  ],
  "properties": {
    "canonical": {
      "type": "boolean",
      "description": "Set to `true` if block corresponds to the canonical chain tip"
    },
    "height": {
      "type": "integer",
      "description": "Height of the block"
    },
    "hash": {
      "type": "string",
      "description": "Hash representing the block"
    },
    "index_block_hash": {
      "type": "string",
      "description": "The only hash that can uniquely identify an anchored block or an unconfirmed state trie"
    },
    "parent_block_hash": {
      "type": "string",
      "description": "Hash of the parent block"
    },
    "parent_index_block_hash": {
      "type": "string",
      "description": "Index block hash of the parent block"
    },
    "burn_block_time": {
      "type": "number",
      "description": "Unix timestamp (in seconds) indicating when this block was mined."
    },
    "burn_block_time_iso": {
      "type": "string",
      "description": "An ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) timestamp indicating when this block was mined."
    },
    "burn_block_hash": {
      "type": "string",
      "description": "Hash of the anchor chain block"
    },
    "burn_block_height": {
      "type": "integer",
      "description": "Height of the anchor chain block"
    },
    "miner_txid": {
      "type": "string",
      "description": "Anchor chain transaction ID"
    },
    "txs": {
      "type": "array",
      "description": "List of transactions included in the block",
      "items": {
        "type": "string",
        "description": "Transaction ID"
      }
    },
    "execution_cost_read_count": {
      "type": "integer",
      "description": "Execution cost read count."
    },
    "execution_cost_read_length": {
      "type": "integer",
      "description": "Execution cost read length."
    },
    "execution_cost_runtime": {
      "type": "integer",
      "description": "Execution cost runtime."
    },
    "execution_cost_write_count": {
      "type": "integer",
      "description": "Execution cost write count."
    },
    "execution_cost_write_length": {
      "type": "integer",
      "description": "Execution cost write length."
    }
  }
}
docs/generated.d.ts (vendored, 93 lines changed)
@@ -14,6 +14,7 @@ export type SchemaMergeRootStub =
  | AddressTransactionsListResponse
  | BlockListResponse
  | BurnBlockListResponse
  | NakamotoBlockListResponse
  | BnsError
  | BnsFetchFileZoneResponse
  | BnsGetAllNamesResponse
@@ -116,6 +117,7 @@ export type SchemaMergeRootStub =
  | StxBalance
  | Block
  | BurnBlock
  | NakamotoBlock
  | BurnchainRewardSlotHolder
  | BurnchainReward
  | BurnchainRewardsTotal
@@ -1331,6 +1333,97 @@ export interface BurnBlock {
   */
  stacks_blocks: string[];
}
/**
 * GET request that returns blocks
 */
export interface NakamotoBlockListResponse {
  /**
   * The number of blocks to return
   */
  limit: number;
  /**
   * The number of blocks to skip (starting at `0`)
   */
  offset: number;
  /**
   * The number of blocks available
   */
  total: number;
  results: NakamotoBlock[];
}
/**
 * A block
 */
export interface NakamotoBlock {
  /**
   * Set to `true` if block corresponds to the canonical chain tip
   */
  canonical: boolean;
  /**
   * Height of the block
   */
  height: number;
  /**
   * Hash representing the block
   */
  hash: string;
  /**
   * The only hash that can uniquely identify an anchored block or an unconfirmed state trie
   */
  index_block_hash: string;
  /**
   * Hash of the parent block
   */
  parent_block_hash: string;
  /**
   * Index block hash of the parent block
   */
  parent_index_block_hash: string;
  /**
   * Unix timestamp (in seconds) indicating when this block was mined.
   */
  burn_block_time: number;
  /**
   * An ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) timestamp indicating when this block was mined.
   */
  burn_block_time_iso: string;
  /**
   * Hash of the anchor chain block
   */
  burn_block_hash: string;
  /**
   * Height of the anchor chain block
   */
  burn_block_height: number;
  /**
   * Anchor chain transaction ID
   */
  miner_txid: string;
  /**
   * List of transactions included in the block
   */
  txs: string[];
  /**
   * Execution cost read count.
   */
  execution_cost_read_count: number;
  /**
   * Execution cost read length.
   */
  execution_cost_read_length: number;
  /**
   * Execution cost runtime.
   */
  execution_cost_runtime: number;
  /**
   * Execution cost write count.
   */
  execution_cost_write_count: number;
  /**
   * Execution cost write length.
   */
  execution_cost_write_length: number;
}
/**
 * Error
 */
@@ -660,10 +660,60 @@ paths:
              example:
                $ref: ./api/blocks/get-burn-blocks.example.json

  /extended/v2/blocks:
    get:
      summary: Get blocks
      description: |
        Retrieves a list of recently mined blocks
      tags:
        - Blocks
      operationId: get_blocks
      parameters:
        - name: limit
          in: query
          description: max number of blocks to fetch
          required: false
          schema:
            type: integer
            example: 20
        - name: offset
          in: query
          description: index of first block to fetch
          required: false
          schema:
            type: integer
            example: 0
        - name: burn_block_hash
          in: query
          description: filter blocks by burn block hash
          required: false
          schema:
            type: string
            example: "0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2"
        - name: burn_block_height
          in: query
          description: filter blocks by burn block height
          required: false
          schema:
            type: integer
            example: 810344
      responses:
        200:
          description: List of blocks
          content:
            application/json:
              schema:
                $ref: ./api/blocks/get-nakamoto-blocks.schema.json
              example:
                $ref: ./api/blocks/get-nakamoto-blocks.example.json

  /extended/v1/block:
    get:
      summary: Get recent blocks
      deprecated: true
      description: |
        **NOTE:** This endpoint is deprecated in favor of [Get blocks](#operation/get_blocks).

        Retrieves a list of recently mined blocks

        If you need to actively monitor new blocks, we highly recommend subscribing to [WebSockets or Socket.io](https://github.com/hirosystems/stacks-blockchain-api/tree/master/client) for real-time updates.
@@ -766,7 +816,11 @@ paths:
            example: "0x00000000000000000002bba732926cf68b6eda3e2cdbc2a85af79f10efeeeb10"
    get:
      summary: Get block by burnchain block hash
      description: Retrieves block details of a specific block for a given burnchain block hash
      deprecated: true
      description: |
        **NOTE:** This endpoint is deprecated in favor of [Get blocks](#operation/get_blocks).

        Retrieves block details of a specific block for a given burnchain block hash
      tags:
        - Blocks
      operationId: get_block_by_burn_block_hash
@@ -797,7 +851,11 @@ paths:
            example: 744603
    get:
      summary: Get block by burnchain height
      description: Retrieves block details of a specific block for a given burn chain height
      deprecated: true
      description: |
        **NOTE:** This endpoint is deprecated in favor of [Get blocks](#operation/get_blocks).

        Retrieves block details of a specific block for a given burn chain height
      tags:
        - Blocks
      operationId: get_block_by_burn_block_height
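To illustrate the parameters documented above, a hedged TypeScript sketch that queries the new endpoint with a burn block height filter; the host name is a placeholder, and the response typing is a reduced local copy of the NakamotoBlockListResponse shape. The `latest` value is accepted per the TypeBox query schema added later in this diff.

```typescript
// Sketch only: query /extended/v2/blocks filtered by burn block height.
interface BlockListPage {
  limit: number;
  offset: number;
  total: number;
  results: { hash: string; height: number; burn_block_height: number }[];
}

async function blocksForBurnHeight(burnHeight: number | 'latest'): Promise<BlockListPage> {
  // The host is an assumption; substitute any Stacks Blockchain API deployment.
  const url = new URL('https://api.example.com/extended/v2/blocks');
  url.searchParams.set('burn_block_height', String(burnHeight));
  url.searchParams.set('limit', '20');
  const res = await fetch(url);
  if (!res.ok) throw new Error(`unexpected status ${res.status}`);
  return (await res.json()) as BlockListPage;
}

// e.g. blocksForBurnHeight('latest') returns the Stacks blocks anchored to the
// most recent canonical burn block.
```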
package-lock.json (generated, 13 lines changed)
@@ -15,6 +15,7 @@
        "@promster/server": "6.0.6",
        "@promster/types": "3.2.3",
        "@scure/base": "1.1.1",
        "@sinclair/typebox": "0.31.28",
        "@stacks/common": "6.8.1",
        "@stacks/network": "6.8.1",
        "@stacks/stacking": "6.9.0",
@@ -1832,6 +1833,12 @@
        "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
      }
    },
    "node_modules/@jest/schemas/node_modules/@sinclair/typebox": {
      "version": "0.27.8",
      "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz",
      "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==",
      "dev": true
    },
    "node_modules/@jest/source-map": {
      "version": "29.6.3",
      "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz",
@@ -2549,9 +2556,9 @@
      }
    },
    "node_modules/@sinclair/typebox": {
      "version": "0.27.8",
      "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz",
      "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA=="
      "version": "0.31.28",
      "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.31.28.tgz",
      "integrity": "sha512-/s55Jujywdw/Jpan+vsy6JZs1z2ZTGxTmbZTPiuSL2wz9mfzA2gN1zzaqmvfi4pq+uOt7Du85fkiwv5ymW84aQ=="
    },
    "node_modules/@sinonjs/commons": {
      "version": "3.0.0",
@@ -90,6 +90,7 @@
    "@promster/server": "6.0.6",
    "@promster/types": "3.2.3",
    "@scure/base": "1.1.1",
    "@sinclair/typebox": "0.31.28",
    "@stacks/common": "6.8.1",
    "@stacks/network": "6.8.1",
    "@stacks/stacking": "6.9.0",
@@ -68,6 +68,7 @@ import { getOperations, parseTransactionMemo } from '../../rosetta/rosetta-helpe
import { PgStore } from '../../datastore/pg-store';
import { SyntheticPoxEventName } from '../../pox-helpers';
import { logger } from '../../logger';
import { BlocksQueryParams } from '../routes/v2/schemas';

export function parseTxTypeStrings(values: string[]): TransactionType[] {
  return values.map(v => {
src/api/init.ts (108 lines changed)
@@ -1,6 +1,5 @@
import { Server, createServer } from 'http';
import { Socket } from 'net';
import * as querystring from 'querystring';
import * as express from 'express';
import { v4 as uuid } from 'uuid';
import * as cors from 'cors';
@@ -44,8 +43,9 @@ import { WebSocketTransmitter } from './routes/ws/web-socket-transmitter';
import { createPoxEventsRouter } from './routes/pox';
import { logger, loggerMiddleware } from '../logger';
import { SERVER_VERSION, isPgConnectionError, isProdEnv, waiter } from '@hirosystems/api-toolkit';
import { createBurnBlockRouter } from './routes/burn-block';
import { createV2BlocksRouter } from './routes/v2/blocks';
import { getReqQuery } from './query-helpers';
import { createBurnBlockRouter } from './routes/burn-block';

export interface ApiServer {
  expressApp: express.Express;
@@ -171,9 +171,9 @@ export async function startApiServer(opts: {
    res.send(errObj).status(404);
  });

  // Setup extended API v1 routes
  // Setup extended API routes
  app.use(
    '/extended/v1',
    '/extended',
    (() => {
      const router = express.Router();
      router.use(cors());
@@ -182,52 +182,72 @@ export async function startApiServer(opts: {
        res.set('Cache-Control', 'no-store');
        next();
      });
      router.use('/tx', createTxRouter(datastore));
      router.use('/block', createBlockRouter(datastore));
      router.use('/microblock', createMicroblockRouter(datastore));
      router.use('/burn_block', createBurnBlockRouter(datastore));
      router.use('/burnchain', createBurnchainRouter(datastore));
      router.use('/contract', createContractRouter(datastore));
      // same here, exclude account nonce route
      router.use('/address', createAddressRouter(datastore, chainId));
      router.use('/search', createSearchRouter(datastore));
      router.use('/info', createInfoRouter(datastore));
      router.use('/stx_supply', createStxSupplyRouter(datastore));
      router.use('/debug', createDebugRouter(datastore));
      router.use('/status', createStatusRouter(datastore));
      router.use('/fee_rate', createFeeRateRouter(datastore));
      router.use('/tokens', createTokenRouter(datastore));
      router.use(
        '/v1',
        (() => {
          const v1 = express.Router();
          v1.use('/tx', createTxRouter(datastore));
          v1.use('/block', createBlockRouter(datastore));
          v1.use('/microblock', createMicroblockRouter(datastore));
          v1.use('/burnchain', createBurnchainRouter(datastore));
          v1.use('/contract', createContractRouter(datastore));
          v1.use('/address', createAddressRouter(datastore, chainId));
          v1.use('/search', createSearchRouter(datastore));
          v1.use('/info', createInfoRouter(datastore));
          v1.use('/stx_supply', createStxSupplyRouter(datastore));
          v1.use('/debug', createDebugRouter(datastore));
          v1.use('/status', createStatusRouter(datastore));
          v1.use('/fee_rate', createFeeRateRouter(datastore));
          v1.use('/tokens', createTokenRouter(datastore));
          v1.use('/burn_block', createBurnBlockRouter(datastore));

      // These could be defined in one route but a url reporting library breaks with regex in middleware paths
      router.use('/pox2', createPoxEventsRouter(datastore, 'pox2'));
      router.use('/pox3', createPoxEventsRouter(datastore, 'pox3'));
      router.use('/pox4', createPoxEventsRouter(datastore, 'pox4'));
      const legacyPoxPathRouter: express.RequestHandler = (req, res) => {
        // Redirect old pox routes paths to new one above
        const newPath = req.path === '/' ? '/events' : req.path;
        const baseUrl = req.baseUrl.replace(/(pox[\d])_events/, '$1');
        const redirectPath = `${baseUrl}${newPath}${getReqQuery(req)}`;
        return res.redirect(redirectPath);
      };
      router.use('/pox2_events', legacyPoxPathRouter);
      router.use('/pox3_events', legacyPoxPathRouter);
      router.use('/pox4_events', legacyPoxPathRouter);
          // These could be defined in one route but a url reporting library breaks with regex in middleware paths
          v1.use('/pox2', createPoxEventsRouter(datastore, 'pox2'));
          v1.use('/pox3', createPoxEventsRouter(datastore, 'pox3'));
          v1.use('/pox4', createPoxEventsRouter(datastore, 'pox4'));
          const legacyPoxPathRouter: express.RequestHandler = (req, res) => {
            // Redirect old pox routes paths to new one above
            const newPath = req.path === '/' ? '/events' : req.path;
            const baseUrl = req.baseUrl.replace(/(pox[\d])_events/, '$1');
            const redirectPath = `${baseUrl}${newPath}${getReqQuery(req)}`;
            return res.redirect(redirectPath);
          };
          v1.use('/pox2_events', legacyPoxPathRouter);
          v1.use('/pox3_events', legacyPoxPathRouter);
          v1.use('/pox4_events', legacyPoxPathRouter);

      if (getChainIDNetwork(chainId) === 'testnet' && writeDatastore) {
        router.use('/faucets', createFaucetRouter(writeDatastore));
      }
          if (getChainIDNetwork(chainId) === 'testnet' && writeDatastore) {
            v1.use('/faucets', createFaucetRouter(writeDatastore));
          }
          return v1;
        })()
      );
      router.use(
        '/v2',
        (() => {
          const v2 = express.Router();
          v2.use('/blocks', createV2BlocksRouter(datastore));
          return v2;
        })()
      );
      router.use(
        '/beta',
        (() => {
          const beta = express.Router();
          // Redirect to new endpoint for backward compatibility.
          // TODO: remove this in the future
          beta.use('/stacking/:pool_principal/delegations', (req, res) => {
            const { pool_principal } = req.params;
            const newPath = `/extended/v1/pox3/${pool_principal}/delegations${getReqQuery(req)}`;
            return res.redirect(newPath);
          });
          return beta;
        })()
      );
      return router;
    })()
  );

  // Redirect to new endpoint for backward compatibility.
  // TODO: remove this in the future
  app.use('/extended/beta/stacking/:pool_principal/delegations', (req, res) => {
    const { pool_principal } = req.params;
    const newPath = `/extended/v1/pox3/${pool_principal}/delegations${getReqQuery(req)}`;
    return res.redirect(newPath);
  });

  // Setup direct proxy to core-node RPC endpoints (/v2)
  // pricing endpoint
  app.use(
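The init.ts change above nests the existing v1 routers and the new v2 blocks router under a single /extended parent. A stripped-down sketch of that mounting pattern, with placeholder handlers standing in for the real routers:

```typescript
import * as express from 'express';

// Reduced sketch of the nesting used in src/api/init.ts: one parent router on
// /extended, with versioned sub-routers so /extended/v1/* keeps working while
// /extended/v2/blocks is added alongside it.
const app = express();
app.use(
  '/extended',
  (() => {
    const router = express.Router();
    router.use(
      '/v1',
      (() => {
        const v1 = express.Router();
        v1.get('/status', (_req, res) => res.json({ status: 'ready' })); // placeholder route
        return v1;
      })()
    );
    router.use(
      '/v2',
      (() => {
        const v2 = express.Router();
        v2.get('/blocks', (_req, res) => res.json({ results: [] })); // placeholder route
        return v2;
      })()
    );
    return router;
  })()
);
```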
@@ -38,7 +38,7 @@ export enum ResourceType {
  BurnBlock,
}

const pagingQueryLimits: Record<ResourceType, { defaultLimit: number; maxLimit: number }> = {
export const pagingQueryLimits: Record<ResourceType, { defaultLimit: number; maxLimit: number }> = {
  [ResourceType.Block]: {
    defaultLimit: 20,
    maxLimit: 30,
src/api/routes/v2/blocks.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import * as express from 'express';
import { PgStore } from '../../../datastore/pg-store';
import {
  getETagCacheHandler,
  setETagCacheHeaders,
} from '../../../api/controllers/cache-controller';
import { asyncHandler } from '../../async-handler';
import { NakamotoBlockListResponse } from 'docs/generated';
import { BlockLimitParam, BlocksQueryParams, CompiledBlocksQueryParams } from './schemas';
import { parseDbNakamotoBlock, validRequestQuery } from './helpers';

export function createV2BlocksRouter(db: PgStore): express.Router {
  const router = express.Router();
  const cacheHandler = getETagCacheHandler(db);

  router.get(
    '/',
    cacheHandler,
    asyncHandler(async (req, res) => {
      if (!validRequestQuery(req, res, CompiledBlocksQueryParams)) return;
      const query = req.query as BlocksQueryParams;

      const { results, total } = await db.getV2Blocks(query);
      const response: NakamotoBlockListResponse = {
        limit: query.limit ?? BlockLimitParam.default,
        offset: query.offset ?? 0,
        total,
        results: results.map(r => parseDbNakamotoBlock(r)),
      };
      setETagCacheHeaders(res);
      res.json(response);
    })
  );
  return router;
}
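A hedged usage sketch against this router, in the same supertest style as the repo's test suite; the `api` fixture is assumed to be an ApiServer started as in the tests at the end of this diff, and the import path is for illustration.

```typescript
import * as supertest from 'supertest';
import { ApiServer } from '../../init'; // path assumption, relative to src/api/routes/v2

async function probeBlocksV2(api: ApiServer) {
  // Happy path: the handler applies the defaults (limit 20, offset 0).
  const ok = await supertest(api.server).get('/extended/v2/blocks');
  console.log(ok.status, JSON.parse(ok.text).total); // 200 and the canonical block count

  // The query schema forbids combining both burn block filters, so this is a 400.
  const bad = await supertest(api.server).get(
    '/extended/v2/blocks?burn_block_hash=latest&burn_block_height=700000'
  );
  console.log(bad.status); // 400
}
```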
src/api/routes/v2/helpers.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import { NakamotoBlock } from 'docs/generated';
import { BlockWithTransactionIds } from '../../../datastore/common';
import { unixEpochToIso } from '../../../helpers';
import { TypeCheck } from '@sinclair/typebox/compiler';
import { Request, Response } from 'express';
import { TSchema } from '@sinclair/typebox';

/**
 * Validate request query parameters with a TypeBox compiled schema
 * @param req - Request
 * @param res - Response
 * @param compiledType - TypeBox compiled schema
 * @returns boolean
 */
export function validRequestQuery(
  req: Request,
  res: Response,
  compiledType: TypeCheck<TSchema>
): boolean {
  if (!compiledType.Check(req.query)) {
    // TODO: Return a more user-friendly error
    res.status(400).json({ errors: [...compiledType.Errors(req.query)] });
    return false;
  }
  return true;
}

export function parseDbNakamotoBlock(block: BlockWithTransactionIds): NakamotoBlock {
  const apiBlock: NakamotoBlock = {
    canonical: block.canonical,
    height: block.block_height,
    hash: block.block_hash,
    index_block_hash: block.index_block_hash,
    parent_block_hash: block.parent_block_hash,
    parent_index_block_hash: block.parent_index_block_hash,
    burn_block_time: block.burn_block_time,
    burn_block_time_iso: unixEpochToIso(block.burn_block_time),
    burn_block_hash: block.burn_block_hash,
    burn_block_height: block.burn_block_height,
    miner_txid: block.miner_txid,
    txs: [...block.tx_ids],
    execution_cost_read_count: block.execution_cost_read_count,
    execution_cost_read_length: block.execution_cost_read_length,
    execution_cost_runtime: block.execution_cost_runtime,
    execution_cost_write_count: block.execution_cost_write_count,
    execution_cost_write_length: block.execution_cost_write_length,
  };
  return apiBlock;
}
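To make the db-to-API mapping concrete, a hedged sketch of the kind of row parseDbNakamotoBlock consumes; the values are copied from the test fixtures later in this diff, the object is trimmed to the fields the mapper reads, and `unixEpochToIso` is assumed to format a Unix-seconds timestamp as the ISO string shown.

```typescript
// Hypothetical input (shape follows BlockWithTransactionIds: a DbBlock row plus tx_ids),
// trimmed to the fields parseDbNakamotoBlock touches.
const dbRow = {
  canonical: true,
  block_height: 5,
  block_hash: '0x0005',
  index_block_hash: '0x0005',
  parent_block_hash: '0x0004',
  parent_index_block_hash: '0x0004',
  burn_block_time: 94869286,
  burn_block_hash: '0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8',
  burn_block_height: 700000,
  miner_txid: '0x4321',
  tx_ids: ['0x0005'],
  execution_cost_read_count: 0,
  execution_cost_read_length: 0,
  execution_cost_runtime: 0,
  execution_cost_write_count: 0,
  execution_cost_write_length: 0,
};

// parseDbNakamotoBlock(dbRow) renames the block_* columns to the API field names and
// derives burn_block_time_iso, producing the `block5` object asserted in the tests below,
// e.g. { height: 5, hash: '0x0005', burn_block_time_iso: '1973-01-03T00:34:46.000Z', ... }.
```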
src/api/routes/v2/schemas.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
import { Type, Static } from '@sinclair/typebox';
import { TypeCompiler } from '@sinclair/typebox/compiler';
import { ResourceType, pagingQueryLimits } from '../../../api/pagination';

// ==========================
// Parameters
// ==========================

const OffsetParam = Type.Integer({
  minimum: 0,
  title: 'Offset',
  description: 'Result offset',
});

export const BlockLimitParam = Type.Integer({
  minimum: 1,
  maximum: pagingQueryLimits[ResourceType.Block].maxLimit,
  default: pagingQueryLimits[ResourceType.Block].defaultLimit,
  title: 'Block limit',
  description: 'Blocks per page',
});

const BurnBlockHashParam = Type.RegExp(/^(0x)?[a-fA-F0-9]{64}$/i, {
  title: 'Burn block hash',
  description: 'Burn block hash',
  examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
});

const BurnBlockHeightParam = Type.RegExp(/^[0-9]+$/, {
  title: 'Burn block height',
  description: 'Burn block height',
  examples: ['777678'],
});

// ==========================
// Query params
// TODO: Migrate these to each endpoint after switching from Express to Fastify
// ==========================

const PaginationParamsSchema = Type.Object(
  {
    limit: Type.Optional(BlockLimitParam),
    offset: Type.Optional(OffsetParam),
  },
  { additionalProperties: false }
);

const BlocksQueryParamsSchema = Type.Union([
  PaginationParamsSchema,
  Type.Composite(
    [
      Type.Object({
        burn_block_hash: Type.Union([Type.Literal('latest'), BurnBlockHashParam]),
      }),
      PaginationParamsSchema,
    ],
    { additionalProperties: false }
  ),
  Type.Composite(
    [
      Type.Object({
        burn_block_height: Type.Union([Type.Literal('latest'), BurnBlockHeightParam]),
      }),
      PaginationParamsSchema,
    ],
    { additionalProperties: false }
  ),
]);
export type BlocksQueryParams = Static<typeof BlocksQueryParamsSchema>;
export const CompiledBlocksQueryParams = TypeCompiler.Compile(BlocksQueryParamsSchema);
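A small sketch of what this union accepts and rejects, using the compiled checker exported above; the expected outputs are read off the schema (pagination alone, or pagination plus exactly one burn-block filter), not taken from a test run.

```typescript
import { CompiledBlocksQueryParams } from './schemas'; // path as in this PR

// Pagination only, or one burn-block filter with optional pagination, passes.
console.log(CompiledBlocksQueryParams.Check({ limit: 10, offset: 0 }));             // true
console.log(CompiledBlocksQueryParams.Check({ burn_block_height: 'latest' }));      // true
console.log(CompiledBlocksQueryParams.Check({ burn_block_hash: 'latest', limit: 5 })); // true

// Combining both filters fails: no union member allows the extra property,
// which is what drives the 400 assertions in the tests below.
console.log(
  CompiledBlocksQueryParams.Check({ burn_block_hash: 'latest', burn_block_height: '700000' })
); // false
```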
@@ -1025,6 +1025,15 @@ export interface TransferQueryResult {
  amount: string;
}

export type DbPaginatedResult<T> = {
  total: number;
  results: T[];
};

export type BlockWithTransactionIds = DbBlock & {
  tx_ids: string[];
};

export interface BlocksWithMetadata {
  results: {
    block: DbBlock;
@@ -15,12 +15,14 @@ import {
  bnsNameFromSubdomain,
  ChainID,
  REPO_DIR,
  normalizeHashString,
} from '../helpers';
import { PgStoreEventEmitter } from './pg-store-event-emitter';
import {
  AddressNftEventIdentifier,
  BlockIdentifier,
  BlockQueryResult,
  BlockWithTransactionIds,
  BlocksWithMetadata,
  ContractTxQueryResult,
  DbAssetEventTypeId,
@@ -44,8 +46,7 @@ import {
  DbMicroblock,
  DbMinerReward,
  DbNftEvent,
  DbPoxSyntheticEvent,
  DbPoxStacker,
  DbPaginatedResult,
  DbRewardSlotHolder,
  DbSearchResult,
  DbSmartContract,
@@ -70,6 +71,8 @@ import {
  StxUnlockEvent,
  TransferQueryResult,
  PoxSyntheticEventTable,
  DbPoxStacker,
  DbPoxSyntheticEvent,
} from './common';
import {
  abiColumn,
@@ -101,6 +104,7 @@ import {
  getPgConnectionEnvValue,
} from './connection';
import * as path from 'path';
import { BlockLimitParam, BlocksQueryParams } from '../api/routes/v2/schemas';

export const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations');

@@ -550,6 +554,88 @@ export class PgStore extends BasePgStore {
    });
  }

  /**
   * Returns Block information with transaction IDs
   * @returns Paginated `BlockWithTransactionIds` array
   */
  async getV2Blocks(args: BlocksQueryParams): Promise<DbPaginatedResult<BlockWithTransactionIds>> {
    return await this.sqlTransaction(async sql => {
      const limit = args.limit ?? BlockLimitParam.default;
      const offset = args.offset ?? 0;
      const burnBlockHashCond =
        'burn_block_hash' in args
          ? sql`burn_block_hash = ${
              args.burn_block_hash === 'latest'
                ? sql`(SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
                : sql`${normalizeHashString(args.burn_block_hash)}`
            }`
          : undefined;
      const burnBlockHeightCond =
        'burn_block_height' in args
          ? sql`burn_block_height = ${
              args.burn_block_height === 'latest'
                ? sql`(SELECT burn_block_height FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
                : sql`${args.burn_block_height}`
            }`
          : undefined;

      // Obtain blocks and transaction counts in the same query.
      const blocksQuery = await sql<
        (BlockQueryResult & {
          tx_ids: string;
          microblocks_accepted: string;
          microblocks_streamed: string;
          total: number;
        })[]
      >`
        WITH block_count AS (
          ${
            'burn_block_hash' in args
              ? sql`SELECT COUNT(*) AS count FROM blocks WHERE canonical = TRUE AND ${burnBlockHashCond}`
              : 'burn_block_height' in args
              ? sql`SELECT COUNT(*) AS count FROM blocks WHERE canonical = TRUE AND ${burnBlockHeightCond}`
              : sql`SELECT block_count AS count FROM chain_tip`
          }
        )
        SELECT
          ${sql(BLOCK_COLUMNS)},
          (
            SELECT STRING_AGG(tx_id,',')
            FROM txs
            WHERE index_block_hash = blocks.index_block_hash
              AND canonical = true
              AND microblock_canonical = true
          ) AS tx_ids,
          (SELECT count FROM block_count)::int AS total
        FROM blocks
        WHERE canonical = true
          AND ${
            'burn_block_hash' in args
              ? burnBlockHashCond
              : 'burn_block_height' in args
              ? burnBlockHeightCond
              : sql`TRUE`
          }
        ORDER BY block_height DESC
        LIMIT ${limit}
        OFFSET ${offset}
      `;
      if (blocksQuery.count === 0)
        return {
          results: [],
          total: 0,
        };
      const blocks = blocksQuery.map(b => ({
        ...parseBlockQueryResult(b),
        tx_ids: b.tx_ids ? b.tx_ids.split(',') : [],
      }));
      return {
        results: blocks,
        total: blocksQuery[0].total,
      };
    });
  }

  async getBlockTxs(indexBlockHash: string) {
    const result = await this.sql<{ tx_id: string; tx_index: number }[]>`
      SELECT tx_id, tx_index
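For orientation, a hedged sketch of calling the new datastore method directly; the PgStore construction is elided, the import path is an assumption, and the `latest` variants resolve to the burn hash/height of the current canonical chain tip via the subqueries above.

```typescript
import { PgStore } from '../../datastore/pg-store'; // path assumption for illustration

async function demoGetV2Blocks(db: PgStore) {
  // Stacks blocks anchored to the most recent canonical burn block, newest first.
  const latest = await db.getV2Blocks({ burn_block_hash: 'latest', limit: 10 });
  console.log(latest.total, latest.results.map(b => b.block_height));

  // A specific burn height; the query schema models the height as a numeric string.
  const atHeight = await db.getV2Blocks({ burn_block_height: '700000' });
  console.log(atHeight.results[0]?.tx_ids); // already split from the STRING_AGG'd column
}
```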
@@ -630,4 +630,111 @@ describe('block tests', () => {
    expect(blockQuery.body.execution_cost_write_count).toBe(3);
    expect(blockQuery.body.execution_cost_write_length).toBe(3);
  });

  test('blocks v2 filtered by burn block', async () => {
    for (let i = 1; i < 6; i++) {
      const block = new TestBlockBuilder({
        block_height: i,
        block_hash: `0x000${i}`,
        index_block_hash: `0x000${i}`,
        parent_index_block_hash: `0x000${i - 1}`,
        parent_block_hash: `0x000${i - 1}`,
        burn_block_height: 700000,
        burn_block_hash: '0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8',
      })
        .addTx({ tx_id: `0x000${i}` })
        .build();
      await db.update(block);
    }
    for (let i = 6; i < 9; i++) {
      const block = new TestBlockBuilder({
        block_height: i,
        block_hash: `0x000${i}`,
        index_block_hash: `0x000${i}`,
        parent_index_block_hash: `0x000${i - 1}`,
        parent_block_hash: `0x000${i - 1}`,
        burn_block_height: 700001,
        burn_block_hash: '0x000000000000000000028eacd4e6e58405d5a37d06b5d7b93776f1eab68d2494',
      })
        .addTx({ tx_id: `0x001${i}` })
        .build();
      await db.update(block);
    }

    // Filter by burn hash
    const block5 = {
      burn_block_hash: '0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8',
      burn_block_height: 700000,
      burn_block_time: 94869286,
      burn_block_time_iso: '1973-01-03T00:34:46.000Z',
      canonical: true,
      execution_cost_read_count: 0,
      execution_cost_read_length: 0,
      execution_cost_runtime: 0,
      execution_cost_write_count: 0,
      execution_cost_write_length: 0,
      hash: '0x0005',
      height: 5,
      index_block_hash: '0x0005',
      miner_txid: '0x4321',
      parent_block_hash: '0x0004',
      parent_index_block_hash: '0x0004',
      txs: ['0x0005'],
    };
    let fetch = await supertest(api.server).get(
      `/extended/v2/blocks?burn_block_hash=00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8`
    );
    let json = JSON.parse(fetch.text);
    expect(fetch.status).toBe(200);
    expect(json.total).toEqual(5);
    expect(json.results[0]).toStrictEqual(block5);

    // Filter by burn height
    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_height=700000`);
    json = JSON.parse(fetch.text);
    expect(fetch.status).toBe(200);
    expect(json.total).toEqual(5);
    expect(json.results[0]).toStrictEqual(block5);

    // Get latest block
    const block8 = {
      burn_block_hash: '0x000000000000000000028eacd4e6e58405d5a37d06b5d7b93776f1eab68d2494',
      burn_block_height: 700001,
      burn_block_time: 94869286,
      burn_block_time_iso: '1973-01-03T00:34:46.000Z',
      canonical: true,
      execution_cost_read_count: 0,
      execution_cost_read_length: 0,
      execution_cost_runtime: 0,
      execution_cost_write_count: 0,
      execution_cost_write_length: 0,
      hash: '0x0008',
      height: 8,
      index_block_hash: '0x0008',
      miner_txid: '0x4321',
      parent_block_hash: '0x0007',
      parent_index_block_hash: '0x0007',
      txs: ['0x0018'],
    };
    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_hash=latest`);
    json = JSON.parse(fetch.text);
    expect(fetch.status).toBe(200);
    expect(json.total).toEqual(3);
    expect(json.results[0]).toStrictEqual(block8);
    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_height=latest`);
    json = JSON.parse(fetch.text);
    expect(fetch.status).toBe(200);
    expect(json.total).toEqual(3);
    expect(json.results[0]).toStrictEqual(block8);

    // Can't filter by both params
    fetch = await supertest(api.server).get(
      `/extended/v2/blocks?burn_block_hash=latest&burn_block_height=latest`
    );
    expect(fetch.status).toBe(400);

    // Block hashes are validated
    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_hash=testvalue`);
    expect(fetch.status).toBe(400);
  });
});
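One detail worth calling out from the "fix: adjust burn block hash regex" item: the burn_block_hash filter accepts the 64-hex-character hash with or without a 0x prefix (the test above passes it unprefixed, while the OpenAPI example is prefixed). A quick sketch of the pattern in isolation:

```typescript
// The same pattern used by BurnBlockHashParam in src/api/routes/v2/schemas.ts.
const burnBlockHashPattern = /^(0x)?[a-fA-F0-9]{64}$/i;

const unprefixed = '00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8';
console.log(burnBlockHashPattern.test(unprefixed));        // true
console.log(burnBlockHashPattern.test(`0x${unprefixed}`)); // true
console.log(burnBlockHashPattern.test('testvalue'));       // false -> 400 from the endpoint
```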