Mirror of https://github.com/alexgo-io/stacks-blockchain-api.git (synced 2026-01-12 16:53:19 +08:00)
fix: move /extended/v1/burn_block to /extended/v2/burn-blocks (#1772)
* fix: move burn block to v2
* fix: new file
* fix: unused exports
@@ -612,14 +612,14 @@ paths:
         schema:
           $ref: ./api/microblocks/get-unanchored-txs.schema.json

-  /extended/v1/burn_block:
+  /extended/v2/burn-blocks:
     get:
-      summary: Get recent burn blocks
+      summary: Get burn blocks
       description: |
         Retrieves a list of recent burn blocks
       tags:
         - Blocks
-      operationId: get_burn_block_list
+      operationId: get_burn_blocks
      parameters:
        - name: limit
          in: query
@@ -636,20 +636,6 @@ paths:
          schema:
            type: integer
            example: 42000
-        - name: height
-          in: query
-          description: filter by burn block height
-          required: false
-          schema:
-            type: integer
-            example: 42000
-        - name: hash
-          in: query
-          description: filter by burn block hash or the constant 'latest' to filter for the most recent burn block
-          required: false
-          schema:
-            type: string
-            example: "0x4839a8b01cfb39ffcc0d07d3db31e848d5adf5279d529ed5062300b9f353ff79"
      responses:
        200:
          description: List of burn blocks
@@ -660,6 +646,34 @@ paths:
              example:
                $ref: ./api/blocks/get-burn-blocks.example.json

+  /extended/v2/burn-blocks/{height_or_hash}:
+    get:
+      summary: Get burn block
+      description: Retrieves a single burn block
+      tags:
+        - Blocks
+      operationId: get_burn_block
+      parameters:
+        - name: height_or_hash
+          in: path
+          description: filter by burn block height, hash, or the constant `latest` to filter for the most recent burn block
+          required: true
+          schema:
+            oneOf:
+              - type: integer
+                example: 42000
+              - type: string
+                example: "0x4839a8b01cfb39ffcc0d07d3db31e848d5adf5279d529ed5062300b9f353ff79"
+      responses:
+        200:
+          description: Burn block
+          content:
+            application/json:
+              schema:
+                $ref: ./entities/blocks/burn-block.schema.json
+              example:
+                $ref: ./entities/blocks/burn-block.example.json
+
  /extended/v2/blocks:
    get:
      summary: Get blocks
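For API consumers, the net effect of the spec changes above is that the list endpoint keeps only pagination parameters, while single-block lookups move to a path parameter. A minimal client sketch follows; the host and the demo() wrapper are hypothetical, only the paths come from the spec above (fetch is the Node 18+/browser global):

    const BASE = 'https://api.example.com'; // hypothetical host

    async function demo(): Promise<void> {
      // Before this change: GET /extended/v1/burn_block?height=42000 or ?hash=latest.
      // After: the list endpoint takes only pagination params...
      const list = await fetch(`${BASE}/extended/v2/burn-blocks?limit=20&offset=0`);
      console.log(await list.json()); // { limit, offset, total, results }

      // ...and single-block lookups move to a path parameter.
      const latest = await fetch(`${BASE}/extended/v2/burn-blocks/latest`);
      const byHeight = await fetch(`${BASE}/extended/v2/burn-blocks/42000`);
      console.log(await latest.json(), await byHeight.json());
    }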
@@ -538,35 +538,6 @@ export async function getMicroblockFromDataStore({
   };
 }

-export async function getBurnBlocksFromDataStore(args: {
-  db: PgStore;
-  limit: number;
-  offset: number;
-  height: number | null;
-  hash: 'latest' | string | null;
-}): Promise<{ total: number; results: BurnBlock[] }> {
-  const query = await args.db.getBurnBlocks({
-    limit: args.limit,
-    offset: args.offset,
-    height: args.height,
-    hash: args.hash,
-  });
-  const results = query.results.map(r => {
-    const burnBlock: BurnBlock = {
-      burn_block_time: r.burn_block_time,
-      burn_block_time_iso: unixEpochToIso(r.burn_block_time),
-      burn_block_hash: r.burn_block_hash,
-      burn_block_height: r.burn_block_height,
-      stacks_blocks: r.stacks_blocks,
-    };
-    return burnBlock;
-  });
-  return {
-    total: query.total,
-    results,
-  };
-}
-
 export async function getMicroblocksFromDataStore(args: {
   db: PgStore;
   limit: number;
@@ -45,7 +45,7 @@ import { logger, loggerMiddleware } from '../logger';
 import { SERVER_VERSION, isPgConnectionError, isProdEnv, waiter } from '@hirosystems/api-toolkit';
 import { createV2BlocksRouter } from './routes/v2/blocks';
 import { getReqQuery } from './query-helpers';
-import { createBurnBlockRouter } from './routes/burn-block';
+import { createV2BurnBlocksRouter } from './routes/v2/burn-blocks';

 export interface ApiServer {
   expressApp: express.Express;
@@ -199,7 +199,6 @@ export async function startApiServer(opts: {
       v1.use('/status', createStatusRouter(datastore));
       v1.use('/fee_rate', createFeeRateRouter(datastore));
       v1.use('/tokens', createTokenRouter(datastore));
-      v1.use('/burn_block', createBurnBlockRouter(datastore));

       // These could be defined in one route but a url reporting library breaks with regex in middleware paths
       v1.use('/pox2', createPoxEventsRouter(datastore, 'pox2'));
@@ -227,6 +226,7 @@ export async function startApiServer(opts: {
     (() => {
       const v2 = express.Router();
       v2.use('/blocks', createV2BlocksRouter(datastore));
+      v2.use('/burn-blocks', createV2BurnBlocksRouter(datastore));
       return v2;
     })()
   );
@@ -111,53 +111,6 @@ export function getBlockParams(
   }
 }

-/**
- * Parses a block hash value from a given request query param.
- * If an error is encountered while parsing the param then a 400 response with an error message is sent and the function throws.
- * @param queryParamName - name of the query param
- * @param paramRequired - if true then the function will throw and return a 400 if the param is missing, if false then the function will return null if the param is missing
- */
-export function getBlockHashQueryParam<TRequired extends boolean>(
-  queryParamName: string,
-  paramRequired: TRequired,
-  req: Request,
-  res: Response,
-  next: NextFunction
-): TRequired extends true ? string | never : string | null {
-  if (!(queryParamName in req.query)) {
-    if (paramRequired) {
-      handleBadRequest(
-        res,
-        next,
-        `Request is missing required "${queryParamName}" query parameter`
-      );
-    } else {
-      return null as TRequired extends true ? string : string | null;
-    }
-  }
-  const hashParamVal = req.query[queryParamName];
-  if (typeof hashParamVal !== 'string') {
-    handleBadRequest(
-      res,
-      next,
-      `Unexpected type for block hash query parameter: ${JSON.stringify(hashParamVal)}`
-    );
-  }
-
-  // Extract the hash part, ignoring '0x' if present
-  const match = hashParamVal.match(/^(0x)?([a-fA-F0-9]{64})$/i);
-  if (!match) {
-    handleBadRequest(
-      res,
-      next,
-      "Invalid hash string. Ensure it is 64 hexadecimal characters long, with an optional '0x' prefix"
-    );
-  }
-
-  // Normalize the string
-  return '0x' + match[2].toLowerCase();
-}
-
 /**
  * Parses a block height value from a given request query param.
  * If an error is encountered while parsing the param then a 400 response with an error message is sent and the function throws.
@@ -1,42 +0,0 @@
-import * as express from 'express';
-import { BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';
-import { getBurnBlocksFromDataStore } from '../controllers/db-controller';
-import { getPagingQueryLimit, parsePagingQueryInput, ResourceType } from '../pagination';
-import { getBlockHashQueryParam, getBlockHeightQueryParam } from '../query-helpers';
-import { getETagCacheHandler, setETagCacheHeaders } from '../controllers/cache-controller';
-import { asyncHandler } from '../async-handler';
-import { PgStore } from '../../datastore/pg-store';
-
-export function createBurnBlockRouter(db: PgStore): express.Router {
-  const router = express.Router();
-  const cacheHandler = getETagCacheHandler(db);
-  router.get(
-    '/',
-    cacheHandler,
-    asyncHandler(async (req, res, next) => {
-      const limit = getPagingQueryLimit(ResourceType.BurnBlock, req.query.limit);
-      const offset = parsePagingQueryInput(req.query.offset ?? 0);
-      const height =
-        getBlockHeightQueryParam('height', false, req, res, next) ??
-        getBlockHeightQueryParam('block_height', false, req, res, next);
-
-      let hash = req.query.hash === 'latest' ? 'latest' : null;
-      if (!hash) {
-        hash = getBlockHashQueryParam('hash', false, req, res, next);
-      }
-
-      const { results, total } = await getBurnBlocksFromDataStore({
-        offset,
-        limit,
-        db,
-        height,
-        hash,
-      });
-      setETagCacheHeaders(res);
-      const response: BurnBlockListResponse = { limit, offset, total, results };
-      res.json(response);
-    })
-  );
-
-  return router;
-}
@@ -6,7 +6,7 @@ import {
 } from '../../../api/controllers/cache-controller';
 import { asyncHandler } from '../../async-handler';
 import { NakamotoBlockListResponse } from 'docs/generated';
-import { BlockLimitParam, BlocksQueryParams, CompiledBlocksQueryParams } from './schemas';
+import { BlockLimitParamSchema, BlocksQueryParams, CompiledBlocksQueryParams } from './schemas';
 import { parseDbNakamotoBlock, validRequestQuery } from './helpers';

 export function createV2BlocksRouter(db: PgStore): express.Router {
@@ -20,10 +20,10 @@ export function createV2BlocksRouter(db: PgStore): express.Router {
       if (!validRequestQuery(req, res, CompiledBlocksQueryParams)) return;
       const query = req.query as BlocksQueryParams;

-      const { results, total } = await db.getV2Blocks(query);
+      const { limit, offset, results, total } = await db.getV2Blocks(query);
       const response: NakamotoBlockListResponse = {
-        limit: query.limit ?? BlockLimitParam.default,
-        offset: query.offset ?? 0,
+        limit,
+        offset,
         total,
         results: results.map(r => parseDbNakamotoBlock(r)),
       };

src/api/routes/v2/burn-blocks.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
+import * as express from 'express';
+import { BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';
+import { getETagCacheHandler, setETagCacheHeaders } from '../../controllers/cache-controller';
+import { asyncHandler } from '../../async-handler';
+import { PgStore } from '../../../datastore/pg-store';
+import { parseDbBurnBlock, validRequestParams, validRequestQuery } from './helpers';
+import {
+  BlockPaginationQueryParams,
+  BurnBlockParams,
+  CompiledBlockPaginationParams,
+  CompiledBurnBlockParams,
+} from './schemas';
+
+export function createV2BurnBlocksRouter(db: PgStore): express.Router {
+  const router = express.Router();
+  const cacheHandler = getETagCacheHandler(db);
+
+  router.get(
+    '/',
+    cacheHandler,
+    asyncHandler(async (req, res) => {
+      if (!validRequestQuery(req, res, CompiledBlockPaginationParams)) return;
+      const query = req.query as BlockPaginationQueryParams;
+
+      const { limit, offset, results, total } = await db.getBurnBlocks(query);
+      const response: BurnBlockListResponse = {
+        limit,
+        offset,
+        total,
+        results: results.map(r => parseDbBurnBlock(r)),
+      };
+      setETagCacheHeaders(res);
+      res.json(response);
+    })
+  );
+
+  router.get(
+    '/:height_or_hash',
+    cacheHandler,
+    asyncHandler(async (req, res) => {
+      if (!validRequestParams(req, res, CompiledBurnBlockParams)) return;
+      const params = req.params as BurnBlockParams;
+
+      const block = await db.getBurnBlock(params);
+      if (!block) {
+        res.status(404).json({ errors: 'Not found' });
+        return;
+      }
+      setETagCacheHeaders(res);
+      res.json(parseDbBurnBlock(block));
+    })
+  );
+
+  return router;
+}
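A condensed sketch of the routing pattern this new file uses, with the datastore, schema validation, and ETag cache handler stubbed out; everything in it is illustrative, not part of the commit:

    import * as express from 'express';

    const app = express();
    const router = express.Router();

    router.get('/', (_req, res) => {
      // Backed by db.getBurnBlocks() in the real router.
      res.json({ limit: 20, offset: 0, total: 0, results: [] });
    });

    router.get('/:height_or_hash', (req, res) => {
      // CompiledBurnBlockParams accepts 'latest', a 64-char hex hash
      // (optionally 0x-prefixed), or a decimal height; reject anything else.
      if (!/^(latest|(0x)?[a-fA-F0-9]{64}|[0-9]+)$/.test(req.params.height_or_hash)) {
        res.status(400).json({ errors: 'invalid height_or_hash' });
        return;
      }
      // Backed by db.getBurnBlock() in the real router; 404 when not found.
      res.status(404).json({ errors: 'Not found' });
    });

    app.use('/extended/v2/burn-blocks', router);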
@@ -1,5 +1,5 @@
-import { NakamotoBlock } from 'docs/generated';
-import { BlockWithTransactionIds } from '../../../datastore/common';
+import { BurnBlock, NakamotoBlock } from 'docs/generated';
+import { BlockWithTransactionIds, DbBurnBlock } from '../../../datastore/common';
 import { unixEpochToIso } from '../../../helpers';
 import { TypeCheck } from '@sinclair/typebox/compiler';
 import { Request, Response } from 'express';
@@ -25,6 +25,26 @@ export function validRequestQuery(
   return true;
 }

+/**
+ * Validate request path parameters with a TypeBox compiled schema
+ * @param req - Request
+ * @param res - Response
+ * @param compiledType - TypeBox compiled schema
+ * @returns boolean
+ */
+export function validRequestParams(
+  req: Request,
+  res: Response,
+  compiledType: TypeCheck<TSchema>
+): boolean {
+  if (!compiledType.Check(req.params)) {
+    // TODO: Return a more user-friendly error
+    res.status(400).json({ errors: [...compiledType.Errors(req.params)] });
+    return false;
+  }
+  return true;
+}
+
 export function parseDbNakamotoBlock(block: BlockWithTransactionIds): NakamotoBlock {
   const apiBlock: NakamotoBlock = {
     canonical: block.canonical,
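A small sketch of the compile-once, check-per-request flow that validRequestParams relies on; the toy schema below stands in for CompiledBurnBlockParams:

    import { Type } from '@sinclair/typebox';
    import { TypeCompiler } from '@sinclair/typebox/compiler';

    const ParamsSchema = Type.Object({
      height_or_hash: Type.Union([Type.Literal('latest'), Type.RegExp(/^[0-9]+$/)]),
    });
    const CompiledParams = TypeCompiler.Compile(ParamsSchema);

    console.log(CompiledParams.Check({ height_or_hash: 'latest' })); // true
    // On failure, Errors() yields the ValueError objects the route returns as JSON.
    console.log([...CompiledParams.Errors({ height_or_hash: 'nope' })]);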
@@ -47,3 +67,14 @@ export function parseDbNakamotoBlock(block: BlockWithTransactionIds): NakamotoBl
   };
   return apiBlock;
 }
+
+export function parseDbBurnBlock(block: DbBurnBlock): BurnBlock {
+  const burnBlock: BurnBlock = {
+    burn_block_time: block.burn_block_time,
+    burn_block_time_iso: unixEpochToIso(block.burn_block_time),
+    burn_block_hash: block.burn_block_hash,
+    burn_block_height: block.burn_block_height,
+    stacks_blocks: block.stacks_blocks,
+  };
+  return burnBlock;
+}
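Illustrative input/output for parseDbBurnBlock; all values below are hypothetical:

    const dbRow = {
      burn_block_time: 1700000000,
      burn_block_hash: '0x0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133',
      burn_block_height: 777678,
      stacks_blocks: ['0xf1f100000000000000000000000000000000000000000000000000000000dead'],
    };
    // parseDbBurnBlock(dbRow) copies the row and derives the ISO timestamp:
    // 1700000000 -> '2023-11-14T22:13:20.000Z' via unixEpochToIso.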
@@ -1,4 +1,4 @@
-import { Type, Static } from '@sinclair/typebox';
+import { Type, Static, TSchema } from '@sinclair/typebox';
 import { TypeCompiler } from '@sinclair/typebox/compiler';
 import { ResourceType, pagingQueryLimits } from '../../../api/pagination';

@@ -6,13 +6,13 @@ import { ResourceType, pagingQueryLimits } from '../../../api/pagination';
 // Parameters
 // ==========================

-const OffsetParam = Type.Integer({
+const OffsetParamSchema = Type.Integer({
   minimum: 0,
   title: 'Offset',
   description: 'Result offset',
 });

-export const BlockLimitParam = Type.Integer({
+export const BlockLimitParamSchema = Type.Integer({
   minimum: 1,
   maximum: pagingQueryLimits[ResourceType.Block].maxLimit,
   default: pagingQueryLimits[ResourceType.Block].defaultLimit,
@@ -20,51 +20,73 @@ export const BlockLimitParam = Type.Integer({
   description: 'Blocks per page',
 });

-const BurnBlockHashParam = Type.RegExp(/^(0x)?[a-fA-F0-9]{64}$/i, {
+const BurnBlockHashParamSchema = Type.RegExp(/^(0x)?[a-fA-F0-9]{64}$/i, {
   title: 'Burn block hash',
   description: 'Burn block hash',
   examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
 });
+type BurnBlockHashParam = Static<typeof BurnBlockHashParamSchema>;
+export const CompiledBurnBlockHashParam = TypeCompiler.Compile(BurnBlockHashParamSchema);

-const BurnBlockHeightParam = Type.RegExp(/^[0-9]+$/, {
+const BurnBlockHeightParamSchema = Type.RegExp(/^[0-9]+$/, {
   title: 'Burn block height',
   description: 'Burn block height',
   examples: ['777678'],
 });
+type BurnBlockHeightParam = Static<typeof BurnBlockHeightParamSchema>;
+const CompiledBurnBlockHeightParam = TypeCompiler.Compile(BurnBlockHeightParamSchema);

 // ==========================
-// Query params
+// Query and path params
+// TODO: Migrate these to each endpoint after switching from Express to Fastify
 // ==========================

-const PaginationParamsSchema = Type.Object(
-  {
-    limit: Type.Optional(BlockLimitParam),
-    offset: Type.Optional(OffsetParam),
-  },
-  { additionalProperties: false }
-);
+const PaginationQueryParamsSchema = <T extends TSchema>(t: T) =>
+  Type.Object(
+    {
+      limit: Type.Optional(t),
+      offset: Type.Optional(OffsetParamSchema),
+    },
+    { additionalProperties: false }
+  );
+
+const BlockPaginationQueryParamsSchema = PaginationQueryParamsSchema(BlockLimitParamSchema);
+export type BlockPaginationQueryParams = Static<typeof BlockPaginationQueryParamsSchema>;
+export const CompiledBlockPaginationParams = TypeCompiler.Compile(BlockPaginationQueryParamsSchema);

 const BlocksQueryParamsSchema = Type.Union([
-  PaginationParamsSchema,
+  BlockPaginationQueryParamsSchema,
   Type.Composite(
     [
       Type.Object({
-        burn_block_hash: Type.Union([Type.Literal('latest'), BurnBlockHashParam]),
+        burn_block_hash: Type.Union([Type.Literal('latest'), BurnBlockHashParamSchema]),
       }),
-      PaginationParamsSchema,
+      BlockPaginationQueryParamsSchema,
     ],
     { additionalProperties: false }
   ),
   Type.Composite(
     [
       Type.Object({
-        burn_block_height: Type.Union([Type.Literal('latest'), BurnBlockHeightParam]),
+        burn_block_height: Type.Union([Type.Literal('latest'), BurnBlockHeightParamSchema]),
       }),
-      PaginationParamsSchema,
+      BlockPaginationQueryParamsSchema,
     ],
     { additionalProperties: false }
   ),
 ]);
 export type BlocksQueryParams = Static<typeof BlocksQueryParamsSchema>;
 export const CompiledBlocksQueryParams = TypeCompiler.Compile(BlocksQueryParamsSchema);
+
+const BurnBlockParamsSchema = Type.Object(
+  {
+    height_or_hash: Type.Union([
+      Type.Literal('latest'),
+      BurnBlockHashParamSchema,
+      BurnBlockHeightParamSchema,
+    ]),
+  },
+  { additionalProperties: false }
+);
+export type BurnBlockParams = Static<typeof BurnBlockParamsSchema>;
+export const CompiledBurnBlockParams = TypeCompiler.Compile(BurnBlockParamsSchema);
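A sketch of the factory pattern introduced above: the generic schema builder lets each resource supply its own limit schema. TxLimitParamSchema is a hypothetical example, not part of this commit:

    import { Type, Static, TSchema } from '@sinclair/typebox';

    const OffsetParamSchema = Type.Integer({ minimum: 0 });
    const PaginationQueryParamsSchema = <T extends TSchema>(t: T) =>
      Type.Object(
        { limit: Type.Optional(t), offset: Type.Optional(OffsetParamSchema) },
        { additionalProperties: false }
      );

    const TxLimitParamSchema = Type.Integer({ minimum: 1, maximum: 50, default: 20 });
    const TxPaginationQueryParamsSchema = PaginationQueryParamsSchema(TxLimitParamSchema);
    type TxPaginationQueryParams = Static<typeof TxPaginationQueryParamsSchema>;
    // => { limit?: number; offset?: number }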
@@ -1026,6 +1026,8 @@ export interface TransferQueryResult {
 }

 export type DbPaginatedResult<T> = {
+  limit: number;
+  offset: number;
   total: number;
   results: T[];
 };
@@ -104,7 +104,13 @@ import {
   getPgConnectionEnvValue,
 } from './connection';
 import * as path from 'path';
-import { BlockLimitParam, BlocksQueryParams } from '../api/routes/v2/schemas';
+import {
+  BlockLimitParamSchema,
+  BlockPaginationQueryParams,
+  BlocksQueryParams,
+  BurnBlockParams,
+  CompiledBurnBlockHashParam,
+} from '../api/routes/v2/schemas';

 export const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations');
@@ -396,31 +402,49 @@ export class PgStore extends BasePgStore {
     return { found: true, result: block } as const;
   }

-  async getBurnBlocks({
-    limit,
-    offset,
-    height,
-    hash,
-  }: {
-    limit: number;
-    offset: number;
-    height: number | null;
-    hash: 'latest' | string | null;
-  }): Promise<{ results: DbBurnBlock[]; total: number }> {
+  async getBurnBlocks(args: BlockPaginationQueryParams): Promise<DbPaginatedResult<DbBurnBlock>> {
     return await this.sqlTransaction(async sql => {
-      const countQuery = await sql<{ burn_block_height: number; count: number }[]>`
-        SELECT burn_block_height, block_count AS count FROM chain_tip
+      const limit = args.limit ?? BlockLimitParamSchema.default;
+      const offset = args.offset ?? 0;
+      const blocksQuery = await sql<(DbBurnBlock & { total: number })[]>`
+        WITH block_count AS (
+          SELECT burn_block_height, block_count AS count FROM chain_tip
+        )
+        SELECT DISTINCT ON (burn_block_height)
+          burn_block_time,
+          burn_block_hash,
+          burn_block_height,
+          ARRAY_AGG(block_hash) OVER (
+            PARTITION BY burn_block_height
+            ORDER BY block_height DESC
+            ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
+          ) AS stacks_blocks,
+          (SELECT count FROM block_count)::int AS total
+        FROM blocks
+        WHERE canonical = true
+        ORDER BY burn_block_height DESC, block_height DESC
+        LIMIT ${limit}
+        OFFSET ${offset}
       `;
-      const heightFilter = height ? sql`AND burn_block_height = ${height}` : sql``;
-      const hashFilter =
-        hash === 'latest'
-          ? sql`AND burn_block_height = ${countQuery[0].burn_block_height}`
-          : hash
-          ? sql`AND burn_block_hash = ${hash}`
-          : sql``;
+      const blocks = blocksQuery.map(r => r);
+      return {
+        limit,
+        offset,
+        results: blocks,
+        total: blocks[0].total,
+      };
+    });
+  }

-      const block_count = countQuery[0].count;
-      const blocksQuery = await sql<DbBurnBlock[]>`
+  async getBurnBlock(args: BurnBlockParams): Promise<DbBurnBlock | undefined> {
+    return await this.sqlTransaction(async sql => {
+      const filter =
+        args.height_or_hash === 'latest'
+          ? sql`burn_block_hash = (SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
+          : CompiledBurnBlockHashParam.Check(args.height_or_hash)
+          ? sql`burn_block_hash = ${args.height_or_hash}`
+          : sql`burn_block_height = ${args.height_or_hash}`;
+      const blockQuery = await sql<DbBurnBlock[]>`
         SELECT DISTINCT ON (burn_block_height)
           burn_block_time,
           burn_block_hash,
@@ -431,18 +455,10 @@ export class PgStore extends BasePgStore {
             ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
           ) AS stacks_blocks
         FROM blocks
-        WHERE canonical = true
-        ${heightFilter}
-        ${hashFilter}
-        ORDER BY burn_block_height DESC, block_height DESC
-        LIMIT ${limit}
-        OFFSET ${offset}
+        WHERE canonical = true AND ${filter}
+        LIMIT 1
       `;
-      const blocks = blocksQuery.map(r => r);
-      return {
-        results: blocks,
-        total: block_count,
-      };
+      if (blockQuery.count > 0) return blockQuery[0];
     });
   }

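A sketch of the height_or_hash dispatch above: 'latest' is checked first, then the compiled hash schema, and anything left is treated as a height (BurnBlockParamsSchema has already guaranteed it matches /^[0-9]+$/). The helper name is ours, purely illustrative:

    function classifyHeightOrHash(value: string): 'latest' | 'hash' | 'height' {
      if (value === 'latest') return 'latest';
      if (/^(0x)?[a-fA-F0-9]{64}$/.test(value)) return 'hash';
      return 'height';
    }

    console.log(classifyHeightOrHash('latest')); // 'latest'
    console.log(classifyHeightOrHash('42000')); // 'height'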
@@ -560,7 +576,7 @@ export class PgStore extends BasePgStore {
   */
   async getV2Blocks(args: BlocksQueryParams): Promise<DbPaginatedResult<BlockWithTransactionIds>> {
     return await this.sqlTransaction(async sql => {
-      const limit = args.limit ?? BlockLimitParam.default;
+      const limit = args.limit ?? BlockLimitParamSchema.default;
       const offset = args.offset ?? 0;
       const burnBlockHashCond =
         'burn_block_hash' in args
@@ -622,6 +638,8 @@ export class PgStore extends BasePgStore {
       `;
       if (blocksQuery.count === 0)
         return {
+          limit,
+          offset,
           results: [],
           total: 0,
         };
@@ -630,6 +648,8 @@ export class PgStore extends BasePgStore {
         tx_ids: b.tx_ids ? b.tx_ids.split(',') : [],
       }));
       return {
+        limit,
+        offset,
         results: blocks,
         total: blocksQuery[0].total,
       };

@@ -340,7 +340,7 @@ describe('block tests', () => {
       await db.update(dbBlock);
     }

-    const result = await supertest(api.server).get(`/extended/v1/burn_block/`);
+    const result = await supertest(api.server).get(`/extended/v2/burn-blocks`);
     expect(result.body.results).toEqual([
       {
         burn_block_hash: burnBlock2.burn_block_hash,
@@ -359,44 +359,38 @@ describe('block tests', () => {
     ]);

     // test 'latest' filter
-    const result2 = await supertest(api.server).get(`/extended/v1/burn_block?hash=latest`);
-    expect(result2.body.results).toEqual([
-      {
-        burn_block_hash: stacksBlocks.at(-1)?.burn_block_hash,
-        burn_block_height: stacksBlocks.at(-1)?.burn_block_height,
-        burn_block_time: stacksBlocks.at(-1)?.burn_block_time,
-        burn_block_time_iso: unixEpochToIso(stacksBlocks.at(-1)?.burn_block_time ?? 0),
-        stacks_blocks: [stacksBlock4.block_hash, stacksBlock3.block_hash, stacksBlock2.block_hash],
-      },
-    ]);
+    const result2 = await supertest(api.server).get(`/extended/v2/burn-blocks/latest`);
+    expect(result2.body).toEqual({
+      burn_block_hash: stacksBlocks.at(-1)?.burn_block_hash,
+      burn_block_height: stacksBlocks.at(-1)?.burn_block_height,
+      burn_block_time: stacksBlocks.at(-1)?.burn_block_time,
+      burn_block_time_iso: unixEpochToIso(stacksBlocks.at(-1)?.burn_block_time ?? 0),
+      stacks_blocks: [stacksBlock4.block_hash, stacksBlock3.block_hash, stacksBlock2.block_hash],
+    });

     // test hash filter
     const result3 = await supertest(api.server).get(
-      `/extended/v1/burn_block?hash=${stacksBlock1.burn_block_hash}`
+      `/extended/v2/burn-blocks/${stacksBlock1.burn_block_hash}`
     );
-    expect(result3.body.results).toEqual([
-      {
-        burn_block_hash: stacksBlock1.burn_block_hash,
-        burn_block_height: stacksBlock1.burn_block_height,
-        burn_block_time: stacksBlock1.burn_block_time,
-        burn_block_time_iso: unixEpochToIso(stacksBlock1.burn_block_time),
-        stacks_blocks: [stacksBlock1.block_hash],
-      },
-    ]);
+    expect(result3.body).toEqual({
+      burn_block_hash: stacksBlock1.burn_block_hash,
+      burn_block_height: stacksBlock1.burn_block_height,
+      burn_block_time: stacksBlock1.burn_block_time,
+      burn_block_time_iso: unixEpochToIso(stacksBlock1.burn_block_time),
+      stacks_blocks: [stacksBlock1.block_hash],
+    });

     // test height filter
     const result4 = await supertest(api.server).get(
-      `/extended/v1/burn_block?height=${stacksBlock1.burn_block_height}`
+      `/extended/v2/burn-blocks/${stacksBlock1.burn_block_height}`
     );
-    expect(result4.body.results).toEqual([
-      {
-        burn_block_hash: stacksBlock1.burn_block_hash,
-        burn_block_height: stacksBlock1.burn_block_height,
-        burn_block_time: stacksBlock1.burn_block_time,
-        burn_block_time_iso: unixEpochToIso(stacksBlock1.burn_block_time),
-        stacks_blocks: [stacksBlock1.block_hash],
-      },
-    ]);
+    expect(result4.body).toEqual({
+      burn_block_hash: stacksBlock1.burn_block_hash,
+      burn_block_height: stacksBlock1.burn_block_height,
+      burn_block_time: stacksBlock1.burn_block_time,
+      burn_block_time_iso: unixEpochToIso(stacksBlock1.burn_block_time),
+      stacks_blocks: [stacksBlock1.block_hash],
+    });
   });

   test('block tx list excludes non-canonical', async () => {