Mirror of https://github.com/alexgo-io/stacks-blockchain-api.git (synced 2026-01-12 16:53:19 +08:00)
feat: create /extended/v2/burn-blocks/:height_or_hash/blocks endpoint (#1782)
* chore: move to v2 pg submodule
* fix: move blocks per burn block endpoint
* docs: openapi
* docs: endpoint name
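A minimal usage sketch of the endpoint this commit adds (illustrative only; the base URL is an assumption for a local deployment, and the default port of a local stacks-blockchain-api instance is assumed to be 3999):

```ts
// Fetch the Stacks blocks confirmed by a given burn block.
// `heightOrHash` accepts a burn block height, a 0x-prefixed hash, or 'latest'.
async function getBlocksByBurnBlock(heightOrHash: string | number = 'latest') {
  const base = 'http://localhost:3999'; // assumed local API instance
  const url = `${base}/extended/v2/burn-blocks/${heightOrHash}/blocks?limit=20&offset=0`;
  const res = await fetch(url);
  if (!res.ok) throw new Error(`request failed with status ${res.status}`);
  return res.json(); // paginated: { limit, offset, total, results }
}
```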
@@ -21,6 +21,8 @@ tags:
       url: https://docs.stacks.co/understand-stacks/accounts
   - name: Blocks
     description: Read-only endpoints to obtain Stacks block details
+  - name: Burn Blocks
+    description: Read-only endpoints to obtain burn block details
   - name: Faucets
     description: Endpoints to request STX or BTC tokens (not possible on Mainnet)
   - name: Fees
@@ -613,7 +615,7 @@ paths:
       description: |
         Retrieves a list of recent burn blocks
       tags:
-        - Blocks
+        - Burn Blocks
       operationId: get_burn_blocks
       parameters:
         - name: limit
@@ -646,7 +648,7 @@
       summary: Get burn block
       description: Retrieves a single burn block
       tags:
-        - Blocks
+        - Burn Blocks
       operationId: get_burn_block
       parameters:
         - name: height_or_hash
@@ -668,6 +670,49 @@
                 $ref: ./entities/blocks/burn-block.schema.json
               example:
                 $ref: ./entities/blocks/burn-block.example.json

+  /extended/v2/burn-blocks/{height_or_hash}/blocks:
+    get:
+      summary: Get blocks by burn block
+      description: |
+        Retrieves a list of blocks confirmed by a specific burn block
+      tags:
+        - Blocks
+      operationId: get_blocks_by_burn_block
+      parameters:
+        - name: height_or_hash
+          in: path
+          description: filter by burn block height, hash, or the constant `latest` to filter for the most recent burn block
+          required: true
+          schema:
+            oneOf:
+              - type: integer
+                example: 42000
+              - type: string
+                example: "0x4839a8b01cfb39ffcc0d07d3db31e848d5adf5279d529ed5062300b9f353ff79"
+        - name: limit
+          in: query
+          description: max number of blocks to fetch
+          required: false
+          schema:
+            type: integer
+            example: 20
+        - name: offset
+          in: query
+          description: index of first burn block to fetch
+          required: false
+          schema:
+            type: integer
+            example: 0
+      responses:
+        200:
+          description: List of blocks
+          content:
+            application/json:
+              schema:
+                $ref: ./api/blocks/get-nakamoto-blocks.schema.json
+              example:
+                $ref: ./api/blocks/get-nakamoto-blocks.example.json
+
   /extended/v2/blocks:
     get:
@@ -692,20 +737,6 @@
           schema:
             type: integer
             example: 0
-        - name: burn_block_hash
-          in: query
-          description: filter blocks by burn block hash
-          required: false
-          schema:
-            type: string
-            example: "0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2"
-        - name: burn_block_height
-          in: query
-          description: filter blocks by burn block height
-          required: false
-          schema:
-            type: integer
-            example: 810344
       responses:
         200:
           description: List of blocks
@@ -747,12 +778,12 @@

   /extended/v2/blocks/{height_or_hash}/transactions:
     get:
-      summary: Get block transactions
+      summary: Get transactions by block
       description: |
         Retrieves transactions confirmed in a single block
       tags:
-        - Blocks
-      operationId: get_block_transactions
+        - Transactions
+      operationId: get_transactions_by_block
       parameters:
         - name: height_or_hash
           in: path
@@ -2913,77 +2944,15 @@
               example:
                 $ref: ./api/bns/errors/bns-unsupported-blockchain.example.json
-
-  # /v1/subdomains:
-  #   get:
-  #     summary: Get All Subdomains
-  #     description: Retrieves a list of all subdomains known to the node.
-  #     tags:
-  #       - Names
-  #     operationId: get_all_subdomains
-  #     parameters:
-  #       - name: page
-  #         in: query
-  #         description: names are returned in pages of size 100, so specify the page number.
-  #         required: true
-  #         example: 3
-  #         schema:
-  #           type: integer
-  #     responses:
-  #       200:
-  #         description: Success
-  #         content:
-  #           application/json:
-  #             schema:
-  #               $ref: ./api/bns/name-querying/bns-get-all-subdomains-response.schema.json
-  #             example:
-  #               $ref: ./api/bns/name-querying/bns-get-all-subdomains-response.example.json
-  #       400:
-  #         description: Error
-  #         content:
-  #           application/json:
-  #             schema:
-  #               $ref: ./api/bns/errors/bns-error.schema.json
-  #             example:
-  #               $ref: ./api/bns/errors/bns-invalid-page.example.json
-  #
-  # /v1/subdomains/{txid}:
-  #   get:
-  #     summary: Get Subdomain at Transaction
-  #     description: Retrieves the list of subdomain operations processed by a given transaction. The returned array includes subdomain operations that have not yet been accepted as part of any subdomain’s history (checkable via the accepted field). If the given transaction ID does not correspond to a Stacks transaction that introduced new subdomain operations, and empty array will be returned.
-  #     tags:
-  #       - Names
-  #     operationId: get_subdomain_at_transaction
-  #     parameters:
-  #       - name: txid
-  #         in: path
-  #         description: transaction id
-  #         required: true
-  #         schema:
-  #           type: string
-  #           example: "d04d708472ea3c147f50e43264efdb1535f71974053126dc4db67b3ac19d41fe"
-  #     responses:
-  #       200:
-  #         description: Success
-  #         content:
-  #           application/json:
-  #             schema:
-  #               $ref: ./api/bns/name-querying/bns-get-subdomain-at-tx-response.schema.json
-  #             example:
-  #               $ref: ./api/bns/name-querying/bns-get-subdomain-at-tx-response.example.json
-  #       400:
-  #         description: Error
-  #         content:
-  #           application/json:
-  #             schema:
-  #               $ref: ./api/bns/errors/bns-error.schema.json
-  #             example:
-  #               $ref: ./api/bns/errors/bns-invalid-tx-id.example.json

   /extended/v1/tx/block/{block_hash}:
     get:
+      deprecated: true
       operationId: get_transactions_by_block_hash
       summary: Transactions by block hash
-      description: Retrieves a list of all transactions within a block for a given block hash.
+      description: |
+        **NOTE:** This endpoint is deprecated in favor of [Get transactions by block](#operation/get_transactions_by_block).
+
+        Retrieves a list of all transactions within a block for a given block hash.
       tags:
         - Transactions
       parameters:
@@ -3020,9 +2989,13 @@

   /extended/v1/tx/block_height/{height}:
     get:
+      deprecated: true
       operationId: get_transactions_by_block_height
       summary: Transactions by block height
-      description: Retrieves all transactions within a block at a given height
+      description: |
+        **NOTE:** This endpoint is deprecated in favor of [Get transactions by block](#operation/get_transactions_by_block).
+
+        Retrieves all transactions within a block at a given height
       tags:
         - Transactions
       parameters:
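The list schema referenced above lives in get-nakamoto-blocks.schema.json, which this diff does not include; a rough shape, inferred from the route handlers further down, would be:

```ts
// Sketch of the paginated envelope built by the v2 block routes; the exact
// block fields come from the generated NakamotoBlock type, abbreviated here.
interface NakamotoBlockListResponseSketch {
  limit: number;
  offset: number;
  total: number;
  results: Record<string, unknown>[]; // NakamotoBlock entries
}
```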
@@ -68,7 +68,6 @@ import { getOperations, parseTransactionMemo } from '../../rosetta/rosetta-helpe
 import { PgStore } from '../../datastore/pg-store';
 import { SyntheticPoxEventName } from '../../pox-helpers';
 import { logger } from '../../logger';
-import { BlocksQueryParams } from '../routes/v2/schemas';

 export function parseTxTypeStrings(values: string[]): TransactionType[] {
   return values.map(v => {
@@ -7,14 +7,14 @@ import {
 import { asyncHandler } from '../../async-handler';
 import { NakamotoBlockListResponse, TransactionResults } from 'docs/generated';
 import {
-  BlocksQueryParams,
   BlockParams,
-  CompiledBlocksQueryParams,
   CompiledBlockParams,
   CompiledTransactionPaginationQueryParams,
   TransactionPaginationQueryParams,
   validRequestQuery,
   validRequestParams,
+  CompiledBlockPaginationQueryParams,
+  BlockPaginationQueryParams,
 } from './schemas';
 import { parseDbNakamotoBlock } from './helpers';
 import { InvalidRequestError } from '../../../errors';
@@ -28,10 +28,10 @@ export function createV2BlocksRouter(db: PgStore): express.Router {
     '/',
     cacheHandler,
     asyncHandler(async (req, res) => {
-      if (!validRequestQuery(req, res, CompiledBlocksQueryParams)) return;
-      const query = req.query as BlocksQueryParams;
+      if (!validRequestQuery(req, res, CompiledBlockPaginationQueryParams)) return;
+      const query = req.query as BlockPaginationQueryParams;

-      const { limit, offset, results, total } = await db.getV2Blocks(query);
+      const { limit, offset, results, total } = await db.v2.getBlocks(query);
       const response: NakamotoBlockListResponse = {
         limit,
         offset,
@@ -50,7 +50,7 @@ export function createV2BlocksRouter(db: PgStore): express.Router {
       if (!validRequestParams(req, res, CompiledBlockParams)) return;
       const params = req.params as BlockParams;

-      const block = await db.getV2Block(params);
+      const block = await db.v2.getBlock(params);
       if (!block) {
         res.status(404).json({ errors: 'Not found' });
         return;
@@ -73,7 +73,7 @@ export function createV2BlocksRouter(db: PgStore): express.Router {
       const query = req.query as TransactionPaginationQueryParams;

       try {
-        const { limit, offset, results, total } = await db.getV2BlockTransactions({
+        const { limit, offset, results, total } = await db.v2.getBlockTransactions({
           ...params,
           ...query,
         });
@@ -1,9 +1,12 @@
 import * as express from 'express';
-import { BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';
+import {
+  BurnBlockListResponse,
+  NakamotoBlockListResponse,
+} from '@stacks/stacks-blockchain-api-types';
 import { getETagCacheHandler, setETagCacheHeaders } from '../../controllers/cache-controller';
 import { asyncHandler } from '../../async-handler';
 import { PgStore } from '../../../datastore/pg-store';
-import { parseDbBurnBlock } from './helpers';
+import { parseDbBurnBlock, parseDbNakamotoBlock } from './helpers';
 import {
   BlockPaginationQueryParams,
   BlockParams,
@@ -12,6 +15,7 @@ import {
   validRequestParams,
   validRequestQuery,
 } from './schemas';
+import { InvalidRequestError } from '../../../errors';

 export function createV2BurnBlocksRouter(db: PgStore): express.Router {
   const router = express.Router();
@@ -24,7 +28,7 @@ export function createV2BurnBlocksRouter(db: PgStore): express.Router {
       if (!validRequestQuery(req, res, CompiledBlockPaginationQueryParams)) return;
       const query = req.query as BlockPaginationQueryParams;

-      const { limit, offset, results, total } = await db.getBurnBlocks(query);
+      const { limit, offset, results, total } = await db.v2.getBurnBlocks(query);
       const response: BurnBlockListResponse = {
         limit,
         offset,
@@ -43,7 +47,7 @@ export function createV2BurnBlocksRouter(db: PgStore): express.Router {
       if (!validRequestParams(req, res, CompiledBlockParams)) return;
       const params = req.params as BlockParams;

-      const block = await db.getBurnBlock(params);
+      const block = await db.v2.getBurnBlock(params);
       if (!block) {
         res.status(404).json({ errors: 'Not found' });
         return;
@@ -53,5 +57,40 @@ export function createV2BurnBlocksRouter(db: PgStore): express.Router {
     })
   );

+  router.get(
+    '/:height_or_hash/blocks',
+    cacheHandler,
+    asyncHandler(async (req, res) => {
+      if (
+        !validRequestParams(req, res, CompiledBlockParams) ||
+        !validRequestQuery(req, res, CompiledBlockPaginationQueryParams)
+      )
+        return;
+      const params = req.params as BlockParams;
+      const query = req.query as BlockPaginationQueryParams;
+
+      try {
+        const { limit, offset, results, total } = await db.v2.getBlocksByBurnBlock({
+          ...params,
+          ...query,
+        });
+        const response: NakamotoBlockListResponse = {
+          limit,
+          offset,
+          total,
+          results: results.map(r => parseDbNakamotoBlock(r)),
+        };
+        setETagCacheHeaders(res);
+        res.json(response);
+      } catch (error) {
+        if (error instanceof InvalidRequestError) {
+          res.status(404).json({ errors: error.message });
+          return;
+        }
+        throw error;
+      }
+    })
+  );
+
   return router;
 }
@@ -113,30 +113,6 @@ export const CompiledTransactionPaginationQueryParams = ajv.compile(
   TransactionPaginationQueryParamsSchema
 );

-const BlocksQueryParamsSchema = Type.Union([
-  BlockPaginationQueryParamsSchema,
-  Type.Composite(
-    [
-      Type.Object({
-        burn_block_hash: Type.Union([Type.Literal('latest'), BurnBlockHashParamSchema]),
-      }),
-      BlockPaginationQueryParamsSchema,
-    ],
-    { additionalProperties: false }
-  ),
-  Type.Composite(
-    [
-      Type.Object({
-        burn_block_height: Type.Union([Type.Literal('latest'), BurnBlockHeightParamSchema]),
-      }),
-      BlockPaginationQueryParamsSchema,
-    ],
-    { additionalProperties: false }
-  ),
-]);
-export type BlocksQueryParams = Static<typeof BlocksQueryParamsSchema>;
-export const CompiledBlocksQueryParams = ajv.compile(BlocksQueryParamsSchema);
-
 const BlockParamsSchema = Type.Object(
   {
     height_or_hash: Type.Union([
src/datastore/pg-store-v2.ts (new file, 233 lines)
@@ -0,0 +1,233 @@
+import { BasePgStoreModule } from '@hirosystems/api-toolkit';
+import {
+  BlockLimitParamSchema,
+  CompiledBurnBlockHashParam,
+  TransactionPaginationQueryParams,
+  TransactionLimitParamSchema,
+  BlockParams,
+  BlockPaginationQueryParams,
+} from '../api/routes/v2/schemas';
+import { InvalidRequestError, InvalidRequestErrorType } from '../errors';
+import { normalizeHashString } from '../helpers';
+import {
+  DbPaginatedResult,
+  DbBlock,
+  BlockQueryResult,
+  DbTx,
+  TxQueryResult,
+  DbBurnBlock,
+} from './common';
+import { BLOCK_COLUMNS, parseBlockQueryResult, TX_COLUMNS, parseTxQueryResult } from './helpers';
+
+export class PgStoreV2 extends BasePgStoreModule {
+  async getBlocks(args: BlockPaginationQueryParams): Promise<DbPaginatedResult<DbBlock>> {
+    return await this.sqlTransaction(async sql => {
+      const limit = args.limit ?? BlockLimitParamSchema.default;
+      const offset = args.offset ?? 0;
+      const blocksQuery = await sql<(BlockQueryResult & { total: number })[]>`
+        WITH block_count AS (
+          SELECT block_count AS count FROM chain_tip
+        )
+        SELECT
+          ${sql(BLOCK_COLUMNS)},
+          (SELECT count FROM block_count)::int AS total
+        FROM blocks
+        WHERE canonical = true
+        ORDER BY block_height DESC
+        LIMIT ${limit}
+        OFFSET ${offset}
+      `;
+      if (blocksQuery.count === 0)
+        return {
+          limit,
+          offset,
+          results: [],
+          total: 0,
+        };
+      const blocks = blocksQuery.map(b => parseBlockQueryResult(b));
+      return {
+        limit,
+        offset,
+        results: blocks,
+        total: blocksQuery[0].total,
+      };
+    });
+  }
+
+  async getBlocksByBurnBlock(
+    args: BlockParams & BlockPaginationQueryParams
+  ): Promise<DbPaginatedResult<DbBlock>> {
+    return await this.sqlTransaction(async sql => {
+      const limit = args.limit ?? BlockLimitParamSchema.default;
+      const offset = args.offset ?? 0;
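+      // `height_or_hash` resolves one of three ways: the literal 'latest' (the
+      // chain tip's burn block), a burn block hash, or a numeric burn block height.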
+      const filter =
+        args.height_or_hash === 'latest'
+          ? sql`burn_block_hash = (SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
+          : CompiledBurnBlockHashParam(args.height_or_hash)
+          ? sql`burn_block_hash = ${normalizeHashString(args.height_or_hash)}`
+          : sql`burn_block_height = ${args.height_or_hash}`;
+      const blockCheck = await sql`SELECT burn_block_hash FROM blocks WHERE ${filter} LIMIT 1`;
+      if (blockCheck.count === 0)
+        throw new InvalidRequestError(
+          `Burn block not found`,
+          InvalidRequestErrorType.invalid_param
+        );
+
+      const blocksQuery = await sql<(BlockQueryResult & { total: number })[]>`
+        WITH block_count AS (
+          SELECT COUNT(*) AS count FROM blocks WHERE canonical = TRUE AND ${filter}
+        )
+        SELECT
+          ${sql(BLOCK_COLUMNS)},
+          (SELECT count FROM block_count)::int AS total
+        FROM blocks
+        WHERE canonical = true AND ${filter}
+        ORDER BY block_height DESC
+        LIMIT ${limit}
+        OFFSET ${offset}
+      `;
+      if (blocksQuery.count === 0)
+        return {
+          limit,
+          offset,
+          results: [],
+          total: 0,
+        };
+      const blocks = blocksQuery.map(b => parseBlockQueryResult(b));
+      return {
+        limit,
+        offset,
+        results: blocks,
+        total: blocksQuery[0].total,
+      };
+    });
+  }
+
+  async getBlock(args: BlockParams): Promise<DbBlock | undefined> {
+    return await this.sqlTransaction(async sql => {
+      const filter =
+        args.height_or_hash === 'latest'
+          ? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
+          : CompiledBurnBlockHashParam(args.height_or_hash)
+          ? sql`(
+              block_hash = ${normalizeHashString(args.height_or_hash)}
+              OR index_block_hash = ${normalizeHashString(args.height_or_hash)}
+            )`
+          : sql`block_height = ${args.height_or_hash}`;
+      const blockQuery = await sql<BlockQueryResult[]>`
+        SELECT ${sql(BLOCK_COLUMNS)}
+        FROM blocks
+        WHERE canonical = true AND ${filter}
+        LIMIT 1
+      `;
+      if (blockQuery.count > 0) return parseBlockQueryResult(blockQuery[0]);
+    });
+  }
+
+  async getBlockTransactions(
+    args: BlockParams & TransactionPaginationQueryParams
+  ): Promise<DbPaginatedResult<DbTx>> {
+    return await this.sqlTransaction(async sql => {
+      const limit = args.limit ?? TransactionLimitParamSchema.default;
+      const offset = args.offset ?? 0;
+      const filter =
+        args.height_or_hash === 'latest'
+          ? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
+          : CompiledBurnBlockHashParam(args.height_or_hash)
+          ? sql`(
+              block_hash = ${normalizeHashString(args.height_or_hash)}
+              OR index_block_hash = ${normalizeHashString(args.height_or_hash)}
+            )`
+          : sql`block_height = ${args.height_or_hash}`;
+      const blockCheck = await sql`SELECT index_block_hash FROM blocks WHERE ${filter} LIMIT 1`;
+      if (blockCheck.count === 0)
+        throw new InvalidRequestError(`Block not found`, InvalidRequestErrorType.invalid_param);
+      const txsQuery = await sql<(TxQueryResult & { total: number })[]>`
+        WITH tx_count AS (
+          SELECT tx_count AS total FROM blocks WHERE canonical = TRUE AND ${filter}
+        )
+        SELECT ${sql(TX_COLUMNS)}, (SELECT total FROM tx_count)::int AS total
+        FROM txs
+        WHERE canonical = true
+          AND microblock_canonical = true
+          AND ${filter}
+        ORDER BY microblock_sequence ASC, tx_index ASC
+        LIMIT ${limit}
+        OFFSET ${offset}
+      `;
+      if (txsQuery.count === 0)
+        return {
+          limit,
+          offset,
+          results: [],
+          total: 0,
+        };
+      return {
+        limit,
+        offset,
+        results: txsQuery.map(t => parseTxQueryResult(t)),
+        total: txsQuery[0].total,
+      };
+    });
+  }
+
+  async getBurnBlocks(args: BlockPaginationQueryParams): Promise<DbPaginatedResult<DbBurnBlock>> {
+    return await this.sqlTransaction(async sql => {
+      const limit = args.limit ?? BlockLimitParamSchema.default;
+      const offset = args.offset ?? 0;
+      const blocksQuery = await sql<(DbBurnBlock & { total: number })[]>`
+        WITH block_count AS (
+          SELECT burn_block_height, block_count AS count FROM chain_tip
+        )
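+        -- The window ARRAY_AGG collects every canonical Stacks block hash anchored
+        -- to a burn block; DISTINCT ON then keeps a single row per burn block.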
+        SELECT DISTINCT ON (burn_block_height)
+          burn_block_time,
+          burn_block_hash,
+          burn_block_height,
+          ARRAY_AGG(block_hash) OVER (
+            PARTITION BY burn_block_height
+            ORDER BY block_height DESC
+            ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
+          ) AS stacks_blocks,
+          (SELECT count FROM block_count)::int AS total
+        FROM blocks
+        WHERE canonical = true
+        ORDER BY burn_block_height DESC, block_height DESC
+        LIMIT ${limit}
+        OFFSET ${offset}
+      `;
+      const blocks = blocksQuery.map(r => r);
+      return {
+        limit,
+        offset,
+        results: blocks,
+        total: blocks[0].total,
+      };
+    });
+  }
+
+  async getBurnBlock(args: BlockParams): Promise<DbBurnBlock | undefined> {
+    return await this.sqlTransaction(async sql => {
+      const filter =
+        args.height_or_hash === 'latest'
+          ? sql`burn_block_hash = (SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
+          : CompiledBurnBlockHashParam(args.height_or_hash)
+          ? sql`burn_block_hash = ${args.height_or_hash}`
+          : sql`burn_block_height = ${args.height_or_hash}`;
+      const blockQuery = await sql<DbBurnBlock[]>`
+        SELECT DISTINCT ON (burn_block_height)
+          burn_block_time,
+          burn_block_hash,
+          burn_block_height,
+          ARRAY_AGG(block_hash) OVER (
+            PARTITION BY burn_block_height
+            ORDER BY block_height DESC
+            ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
+          ) AS stacks_blocks
+        FROM blocks
+        WHERE canonical = true AND ${filter}
+        LIMIT 1
+      `;
+      if (blockQuery.count > 0) return blockQuery[0];
+    });
+  }
+}
@@ -15,7 +15,6 @@ import {
   bnsNameFromSubdomain,
   ChainID,
   REPO_DIR,
-  normalizeHashString,
 } from '../helpers';
 import { PgStoreEventEmitter } from './pg-store-event-emitter';
 import {
@@ -29,7 +28,6 @@ import {
   DbBnsNamespace,
   DbBnsSubdomain,
   DbBnsZoneFile,
-  DbBurnBlock,
   DbBurnchainReward,
   DbChainTip,
   DbEvent,
@@ -44,7 +42,6 @@ import {
   DbMicroblock,
   DbMinerReward,
   DbNftEvent,
-  DbPaginatedResult,
   DbRewardSlotHolder,
   DbSearchResult,
   DbSmartContract,
@@ -71,7 +68,6 @@ import {
   PoxSyntheticEventTable,
   DbPoxStacker,
   DbPoxSyntheticEvent,
-  TxQueryResult,
 } from './common';
 import {
   abiColumn,
@@ -103,16 +99,7 @@ import {
   getPgConnectionEnvValue,
 } from './connection';
 import * as path from 'path';
-import {
-  BlockLimitParamSchema,
-  BlockPaginationQueryParams,
-  BlocksQueryParams,
-  BlockParams,
-  TransactionPaginationQueryParams,
-  TransactionLimitParamSchema,
-  CompiledBurnBlockHashParam,
-} from '../api/routes/v2/schemas';
-import { InvalidRequestError, InvalidRequestErrorType } from '../errors';
+import { PgStoreV2 } from './pg-store-v2';

 export const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations');

@@ -123,6 +110,7 @@ export const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations');
  * happened in the `PgServer.primary` server (see `.env`).
  */
 export class PgStore extends BasePgStore {
+  readonly v2: PgStoreV2;
   readonly eventEmitter: PgStoreEventEmitter;
   readonly notifier?: PgNotifier;

@@ -130,6 +118,7 @@ export class PgStore extends BasePgStore {
     super(sql);
     this.notifier = notifier;
     this.eventEmitter = new PgStoreEventEmitter();
+    this.v2 = new PgStoreV2(this);
   }

   static async connect({
@@ -404,66 +393,6 @@ export class PgStore extends BasePgStore {
     return { found: true, result: block } as const;
   }

-  async getBurnBlocks(args: BlockPaginationQueryParams): Promise<DbPaginatedResult<DbBurnBlock>> {
-    return await this.sqlTransaction(async sql => {
-      const limit = args.limit ?? BlockLimitParamSchema.default;
-      const offset = args.offset ?? 0;
-      const blocksQuery = await sql<(DbBurnBlock & { total: number })[]>`
-        WITH block_count AS (
-          SELECT burn_block_height, block_count AS count FROM chain_tip
-        )
-        SELECT DISTINCT ON (burn_block_height)
-          burn_block_time,
-          burn_block_hash,
-          burn_block_height,
-          ARRAY_AGG(block_hash) OVER (
-            PARTITION BY burn_block_height
-            ORDER BY block_height DESC
-            ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
-          ) AS stacks_blocks,
-          (SELECT count FROM block_count)::int AS total
-        FROM blocks
-        WHERE canonical = true
-        ORDER BY burn_block_height DESC, block_height DESC
-        LIMIT ${limit}
-        OFFSET ${offset}
-      `;
-      const blocks = blocksQuery.map(r => r);
-      return {
-        limit,
-        offset,
-        results: blocks,
-        total: blocks[0].total,
-      };
-    });
-  }
-
-  async getBurnBlock(args: BlockParams): Promise<DbBurnBlock | undefined> {
-    return await this.sqlTransaction(async sql => {
-      const filter =
-        args.height_or_hash === 'latest'
-          ? sql`burn_block_hash = (SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
-          : CompiledBurnBlockHashParam(args.height_or_hash)
-          ? sql`burn_block_hash = ${args.height_or_hash}`
-          : sql`burn_block_height = ${args.height_or_hash}`;
-      const blockQuery = await sql<DbBurnBlock[]>`
-        SELECT DISTINCT ON (burn_block_height)
-          burn_block_time,
-          burn_block_hash,
-          burn_block_height,
-          ARRAY_AGG(block_hash) OVER (
-            PARTITION BY burn_block_height
-            ORDER BY block_height DESC
-            ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
-          ) AS stacks_blocks
-        FROM blocks
-        WHERE canonical = true AND ${filter}
-        LIMIT 1
-      `;
-      if (blockQuery.count > 0) return blockQuery[0];
-    });
-  }
-
   /**
    * Returns Block information with metadata, including accepted and streamed microblocks hash
    * @returns `BlocksWithMetadata` object including list of Blocks with metadata and total count.
@@ -573,143 +502,6 @@ export class PgStore extends BasePgStore {
     });
   }

-  /**
-   * Returns Block information with transaction IDs
-   * @returns Paginated `DbBlock` array
-   */
-  async getV2Blocks(args: BlocksQueryParams): Promise<DbPaginatedResult<DbBlock>> {
-    return await this.sqlTransaction(async sql => {
-      const limit = args.limit ?? BlockLimitParamSchema.default;
-      const offset = args.offset ?? 0;
-      const burnBlockHashCond =
-        'burn_block_hash' in args
-          ? sql`burn_block_hash = ${
-              args.burn_block_hash === 'latest'
-                ? sql`(SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
-                : sql`${normalizeHashString(args.burn_block_hash)}`
-            }`
-          : undefined;
-      const burnBlockHeightCond =
-        'burn_block_height' in args
-          ? sql`burn_block_height = ${
-              args.burn_block_height === 'latest'
-                ? sql`(SELECT burn_block_height FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
-                : sql`${args.burn_block_height}`
-            }`
-          : undefined;
-
-      // Obtain blocks and transaction counts in the same query.
-      const blocksQuery = await sql<(BlockQueryResult & { total: number })[]>`
-        WITH block_count AS (
-          ${
-            'burn_block_hash' in args
-              ? sql`SELECT COUNT(*) AS count FROM blocks WHERE canonical = TRUE AND ${burnBlockHashCond}`
-              : 'burn_block_height' in args
-              ? sql`SELECT COUNT(*) AS count FROM blocks WHERE canonical = TRUE AND ${burnBlockHeightCond}`
-              : sql`SELECT block_count AS count FROM chain_tip`
-          }
-        )
-        SELECT
-          ${sql(BLOCK_COLUMNS)},
-          (SELECT count FROM block_count)::int AS total
-        FROM blocks
-        WHERE canonical = true
-          AND ${
-            'burn_block_hash' in args
-              ? burnBlockHashCond
-              : 'burn_block_height' in args
-              ? burnBlockHeightCond
-              : sql`TRUE`
-          }
-        ORDER BY block_height DESC
-        LIMIT ${limit}
-        OFFSET ${offset}
-      `;
-      if (blocksQuery.count === 0)
-        return {
-          limit,
-          offset,
-          results: [],
-          total: 0,
-        };
-      const blocks = blocksQuery.map(b => parseBlockQueryResult(b));
-      return {
-        limit,
-        offset,
-        results: blocks,
-        total: blocksQuery[0].total,
-      };
-    });
-  }
-
-  async getV2Block(args: BlockParams): Promise<DbBlock | undefined> {
-    return await this.sqlTransaction(async sql => {
-      const filter =
-        args.height_or_hash === 'latest'
-          ? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
-          : CompiledBurnBlockHashParam(args.height_or_hash)
-          ? sql`(
-              block_hash = ${normalizeHashString(args.height_or_hash)}
-              OR index_block_hash = ${normalizeHashString(args.height_or_hash)}
-            )`
-          : sql`block_height = ${args.height_or_hash}`;
-      const blockQuery = await sql<BlockQueryResult[]>`
-        SELECT ${sql(BLOCK_COLUMNS)}
-        FROM blocks
-        WHERE canonical = true AND ${filter}
-        LIMIT 1
-      `;
-      if (blockQuery.count > 0) return parseBlockQueryResult(blockQuery[0]);
-    });
-  }
-
-  async getV2BlockTransactions(
-    args: BlockParams & TransactionPaginationQueryParams
-  ): Promise<DbPaginatedResult<DbTx>> {
-    return await this.sqlTransaction(async sql => {
-      const limit = args.limit ?? TransactionLimitParamSchema.default;
-      const offset = args.offset ?? 0;
-      const filter =
-        args.height_or_hash === 'latest'
-          ? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
-          : CompiledBurnBlockHashParam(args.height_or_hash)
-          ? sql`(
-              block_hash = ${normalizeHashString(args.height_or_hash)}
-              OR index_block_hash = ${normalizeHashString(args.height_or_hash)}
-            )`
-          : sql`block_height = ${args.height_or_hash}`;
-      const blockCheck = await sql`SELECT index_block_hash FROM blocks WHERE ${filter} LIMIT 1`;
-      if (blockCheck.count === 0)
-        throw new InvalidRequestError(`Block not found`, InvalidRequestErrorType.invalid_param);
-      const txsQuery = await sql<(TxQueryResult & { total: number })[]>`
-        WITH tx_count AS (
-          SELECT tx_count AS total FROM blocks WHERE canonical = TRUE AND ${filter}
-        )
-        SELECT ${sql(TX_COLUMNS)}, (SELECT total FROM tx_count)::int AS total
-        FROM txs
-        WHERE canonical = true
-          AND microblock_canonical = true
-          AND ${filter}
-        ORDER BY microblock_sequence ASC, tx_index ASC
-        LIMIT ${limit}
-        OFFSET ${offset}
-      `;
-      if (txsQuery.count === 0)
-        return {
-          limit,
-          offset,
-          results: [],
-          total: 0,
-        };
-      return {
-        limit,
-        offset,
-        results: txsQuery.map(t => parseTxQueryResult(t)),
-        total: txsQuery[0].total,
-      };
-    });
-  }
-
   /**
    * @deprecated Only used in tests
    */
@@ -678,7 +678,7 @@ describe('block tests', () => {
       tx_count: 1,
     };
     let fetch = await supertest(api.server).get(
-      `/extended/v2/blocks?burn_block_hash=00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8`
+      `/extended/v2/burn-blocks/00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8/blocks`
     );
     let json = JSON.parse(fetch.text);
     expect(fetch.status).toBe(200);
@@ -686,7 +686,7 @@ describe('block tests', () => {
     expect(json.results[0]).toStrictEqual(block5);

     // Filter by burn height
-    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_height=700000`);
+    fetch = await supertest(api.server).get(`/extended/v2/burn-blocks/700000/blocks`);
     json = JSON.parse(fetch.text);
     expect(fetch.status).toBe(200);
     expect(json.total).toEqual(5);
@@ -712,25 +712,14 @@ describe('block tests', () => {
       parent_index_block_hash: '0x0007',
       tx_count: 1,
     };
-    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_hash=latest`);
+    fetch = await supertest(api.server).get(`/extended/v2/burn-blocks/latest/blocks`);
     json = JSON.parse(fetch.text);
     expect(fetch.status).toBe(200);
     expect(json.total).toEqual(3);
     expect(json.results[0]).toStrictEqual(block8);
-    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_height=latest`);
-    json = JSON.parse(fetch.text);
-    expect(fetch.status).toBe(200);
-    expect(json.total).toEqual(3);
-    expect(json.results[0]).toStrictEqual(block8);
-
-    // Can't filter by both params
-    fetch = await supertest(api.server).get(
-      `/extended/v2/blocks?burn_block_hash=latest&burn_block_height=latest`
-    );
-    expect(fetch.status).toBe(400);

     // Block hashes are validated
-    fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_hash=testvalue`);
+    fetch = await supertest(api.server).get(`/extended/v2/burn-blocks/testvalue/blocks`);
     expect(fetch.status).toBe(400);
   });