feat: add tx_count property to /extended/v2/blocks (#1778)

* fix: move to tx_count

* docs: add new transactions endpoint docs

* fix: unused exports

* feat: transactions per block

* test: 404 on block
Rafael Cárdenas, 2023-12-19 10:02:02 -06:00 (committed by GitHub)
parent bf99e90fa5 · commit da4cd569a5
26 changed files with 455 additions and 139 deletions
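
As a consumer-level summary of the change: blocks returned by /extended/v2/blocks no longer include a txs array of transaction IDs; they carry a tx_count integer instead, and the IDs move to the new /extended/v2/blocks/{height_or_hash}/transactions endpoint. A minimal sketch of reading the new field, assuming a hypothetical local API at http://localhost:3999 (the host and error handling are assumptions, not part of this commit):

// Sketch: read the new tx_count field from a v2 block response.
async function getBlockTxCount(heightOrHash: string | number): Promise<number> {
  const res = await fetch(`http://localhost:3999/extended/v2/blocks/${heightOrHash}`);
  if (!res.ok) throw new Error(`request failed with status ${res.status}`);
  const block = (await res.json()) as { tx_count: number };
  return block.tx_count; // previously a `txs: string[]` array; now just the count
}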

View File

@@ -10,11 +10,7 @@
"burn_block_hash": "0xb154c008df2101023a6d0d54986b3964cee58119eed14f5bed98e15678e18fe2",
"burn_block_height": 654439,
"miner_txid": "0xd7d56070277ccd87b42acf0c91f915dd181f9db4cf878a4e95518bc397c240cc",
"txs": [
"0x4262db117659d1ca9406970c8f44ffd3d8f11f8e18c591d2e3960f4070107754",
"0x383632cd3b5464dffb684082750fcfaddd1f52625bbb9f884ed8f45d2b1f0547",
"0xc99fe597e44b8bd15a50eec660c6e679a7144a5a8553d214b9d5f1406d278c22"
],
"tx_count": 3,
"execution_cost_read_count": 2477,
"execution_cost_read_length": 1659409,
"execution_cost_runtime": 2520952000,

View File

@@ -11,6 +11,7 @@
"parent_block_hash",
"parent_index_block_hash",
"txs",
"tx_count",
"burn_block_time",
"burn_block_time_iso",
"burn_block_hash",
@@ -67,13 +68,9 @@
"type": "string",
"description": "Anchor chain transaction ID"
},
"txs": {
"type": "array",
"description": "List of transactions included in the block",
"items": {
"type": "string",
"description": "Transaction ID"
}
"tx_count": {
"type": "integer",
"description": "Number of transactions included in the block"
},
"execution_cost_read_count": {
"type": "integer",

docs/generated.d.ts (vendored)
View File

@@ -1366,9 +1366,9 @@ export interface NakamotoBlock {
*/
miner_txid: string;
/**
* List of transactions included in the block
* Number of transactions included in the block
*/
txs: string[];
tx_count: number;
/**
* Execution cost read count.
*/

View File

@@ -745,6 +745,35 @@ paths:
example:
$ref: ./entities/blocks/nakamoto-block.example.json
/extended/v2/blocks/{height_or_hash}/transactions:
get:
summary: Get block transactions
description: |
Retrieves transactions confirmed in a single block
tags:
- Blocks
operationId: get_block_transactions
parameters:
- name: height_or_hash
in: path
description: filter by block height, hash, index block hash, or the constant `latest` to return the most recent block
required: true
schema:
oneOf:
- type: integer
example: 42000
- type: string
example: "0x4839a8b01cfb39ffcc0d07d3db31e848d5adf5279d529ed5062300b9f353ff79"
responses:
200:
description: List of transactions
content:
application/json:
schema:
$ref: ./api/transaction/get-transactions.schema.json
example:
$ref: ./api/transaction/get-transactions.example.json
/extended/v1/block:
get:
summary: Get recent blocks

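A hedged usage sketch for the endpoint defined above; limit and offset follow the pagination schema referenced by the spec, and the base URL is again an assumption:

// Sketch: fetch one page of a block's transactions from a local API host.
async function getBlockTransactions(heightOrHash: string | number, limit = 20, offset = 0) {
  const url =
    `http://localhost:3999/extended/v2/blocks/${heightOrHash}/transactions` +
    `?limit=${limit}&offset=${offset}`;
  const res = await fetch(url);
  if (res.status === 404) return undefined; // block not found
  return (await res.json()) as {
    limit: number;
    offset: number;
    total: number;
    results: { tx_id: string }[];
  };
}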
View File

@@ -0,0 +1,26 @@
/* eslint-disable camelcase */
exports.shorthands = undefined;
exports.up = pgm => {
pgm.addColumns('blocks', {
tx_count: {
type: 'int',
default: 1,
},
});
pgm.sql(`
UPDATE blocks SET tx_count = (
SELECT COUNT(*)::int
FROM txs
WHERE index_block_hash = blocks.index_block_hash
AND canonical = TRUE
AND microblock_canonical = TRUE
)
`);
pgm.alterColumn('blocks', 'tx_count', { notNull: true });
};
exports.down = pgm => {
pgm.dropColumn('blocks', ['tx_count']);
};

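The migration above follows the usual three-step pattern for adding a NOT NULL column to a populated table: add the column with a default, backfill it from txs, then tighten it to NOT NULL. A sketch of a post-migration sanity check, assuming the postgres.js client used elsewhere in this repo and a hypothetical local connection string:

import postgres from 'postgres';

// Sketch: count blocks whose backfilled tx_count disagrees with a live COUNT(*).
const sql = postgres('postgres://localhost/stacks_blockchain_api');
const [row] = await sql<{ mismatches: number }[]>`
  SELECT COUNT(*)::int AS mismatches
  FROM blocks b
  WHERE b.tx_count <> (
    SELECT COUNT(*)::int FROM txs
    WHERE index_block_hash = b.index_block_hash
      AND canonical = TRUE
      AND microblock_canonical = TRUE
  )
`;
console.log(`blocks with stale tx_count: ${row.mismatches}`);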
View File

@@ -5,14 +5,20 @@ import {
setETagCacheHeaders,
} from '../../../api/controllers/cache-controller';
import { asyncHandler } from '../../async-handler';
import { NakamotoBlockListResponse } from 'docs/generated';
import { NakamotoBlockListResponse, TransactionResults } from 'docs/generated';
import {
BlocksQueryParams,
BurnBlockParams,
BlockParams,
CompiledBlocksQueryParams,
CompiledBurnBlockParams,
CompiledBlockParams,
CompiledTransactionPaginationQueryParams,
TransactionPaginationQueryParams,
validRequestQuery,
validRequestParams,
} from './schemas';
import { parseDbNakamotoBlock, validRequestParams, validRequestQuery } from './helpers';
import { parseDbNakamotoBlock } from './helpers';
import { InvalidRequestError } from '../../../errors';
import { parseDbTx } from '../../../api/controllers/db-controller';
export function createV2BlocksRouter(db: PgStore): express.Router {
const router = express.Router();
@@ -41,8 +47,8 @@ export function createV2BlocksRouter(db: PgStore): express.Router {
'/:height_or_hash',
cacheHandler,
asyncHandler(async (req, res) => {
if (!validRequestParams(req, res, CompiledBurnBlockParams)) return;
const params = req.params as BurnBlockParams;
if (!validRequestParams(req, res, CompiledBlockParams)) return;
const params = req.params as BlockParams;
const block = await db.getV2Block(params);
if (!block) {
@@ -54,5 +60,40 @@ export function createV2BlocksRouter(db: PgStore): express.Router {
})
);
router.get(
'/:height_or_hash/transactions',
cacheHandler,
asyncHandler(async (req, res) => {
if (
!validRequestParams(req, res, CompiledBlockParams) ||
!validRequestQuery(req, res, CompiledTransactionPaginationQueryParams)
)
return;
const params = req.params as BlockParams;
const query = req.query as TransactionPaginationQueryParams;
try {
const { limit, offset, results, total } = await db.getV2BlockTransactions({
...params,
...query,
});
const response: TransactionResults = {
limit,
offset,
total,
results: results.map(r => parseDbTx(r)),
};
setETagCacheHeaders(res);
res.json(response);
} catch (error) {
if (error instanceof InvalidRequestError) {
res.status(404).json({ errors: error.message });
return;
}
throw error;
}
})
);
return router;
}

View File

@@ -3,12 +3,14 @@ import { BurnBlockListResponse } from '@stacks/stacks-blockchain-api-types';
import { getETagCacheHandler, setETagCacheHeaders } from '../../controllers/cache-controller';
import { asyncHandler } from '../../async-handler';
import { PgStore } from '../../../datastore/pg-store';
import { parseDbBurnBlock, validRequestParams, validRequestQuery } from './helpers';
import { parseDbBurnBlock } from './helpers';
import {
BlockPaginationQueryParams,
BurnBlockParams,
CompiledBlockPaginationParams,
CompiledBurnBlockParams,
BlockParams,
CompiledBlockPaginationQueryParams,
CompiledBlockParams,
validRequestParams,
validRequestQuery,
} from './schemas';
export function createV2BurnBlocksRouter(db: PgStore): express.Router {
@@ -19,7 +21,7 @@ export function createV2BurnBlocksRouter(db: PgStore): express.Router {
'/',
cacheHandler,
asyncHandler(async (req, res) => {
if (!validRequestQuery(req, res, CompiledBlockPaginationParams)) return;
if (!validRequestQuery(req, res, CompiledBlockPaginationQueryParams)) return;
const query = req.query as BlockPaginationQueryParams;
const { limit, offset, results, total } = await db.getBurnBlocks(query);
@@ -38,8 +40,8 @@ export function createV2BurnBlocksRouter(db: PgStore): express.Router {
'/:height_or_hash',
cacheHandler,
asyncHandler(async (req, res) => {
if (!validRequestParams(req, res, CompiledBurnBlockParams)) return;
const params = req.params as BurnBlockParams;
if (!validRequestParams(req, res, CompiledBlockParams)) return;
const params = req.params as BlockParams;
const block = await db.getBurnBlock(params);
if (!block) {

View File

@@ -1,51 +1,8 @@
import { BurnBlock, NakamotoBlock } from 'docs/generated';
import { BlockWithTransactionIds, DbBurnBlock } from '../../../datastore/common';
import { DbBlock, DbBurnBlock } from '../../../datastore/common';
import { unixEpochToIso } from '../../../helpers';
import { TypeCheck } from '@sinclair/typebox/compiler';
import { Request, Response } from 'express';
import { TSchema } from '@sinclair/typebox';
/**
* Validate request query parameters with a TypeBox compiled schema
* @param req - Request
* @param res - Response
* @param compiledType - TypeBox compiled schema
* @returns boolean
*/
export function validRequestQuery(
req: Request,
res: Response,
compiledType: TypeCheck<TSchema>
): boolean {
if (!compiledType.Check(req.query)) {
// TODO: Return a more user-friendly error
res.status(400).json({ errors: [...compiledType.Errors(req.query)] });
return false;
}
return true;
}
/**
* Validate request path parameters with a TypeBox compiled schema
* @param req - Request
* @param res - Response
* @param compiledType - TypeBox compiled schema
* @returns boolean
*/
export function validRequestParams(
req: Request,
res: Response,
compiledType: TypeCheck<TSchema>
): boolean {
if (!compiledType.Check(req.params)) {
// TODO: Return a more user-friendly error
res.status(400).json({ errors: [...compiledType.Errors(req.params)] });
return false;
}
return true;
}
export function parseDbNakamotoBlock(block: BlockWithTransactionIds): NakamotoBlock {
export function parseDbNakamotoBlock(block: DbBlock): NakamotoBlock {
const apiBlock: NakamotoBlock = {
canonical: block.canonical,
height: block.block_height,
@@ -58,7 +15,7 @@ export function parseDbNakamotoBlock(block: BlockWithTransactionIds): NakamotoBl
burn_block_hash: block.burn_block_hash,
burn_block_height: block.burn_block_height,
miner_txid: block.miner_txid,
txs: [...block.tx_ids],
tx_count: block.tx_count,
execution_cost_read_count: block.execution_cost_read_count,
execution_cost_read_length: block.execution_cost_read_length,
execution_cost_runtime: block.execution_cost_runtime,

View File

@@ -1,6 +1,50 @@
import { Type, Static, TSchema } from '@sinclair/typebox';
import { TypeCompiler } from '@sinclair/typebox/compiler';
import { ResourceType, pagingQueryLimits } from '../../../api/pagination';
import { Request, Response } from 'express';
import * as Ajv from 'ajv';
const ajv = new Ajv({ coerceTypes: true });
/**
* Validate request query parameters with a TypeBox compiled schema
* @param req - Request
* @param res - Response
* @param compiledType - Ajv compiled schema
* @returns boolean
*/
export function validRequestQuery(
req: Request,
res: Response,
compiledType: Ajv.ValidateFunction
): boolean {
if (!compiledType(req.query)) {
// TODO: Return a more user-friendly error
res.status(400).json({ errors: compiledType.errors });
return false;
}
return true;
}
/**
* Validate request path parameters with a TypeBox compiled schema
* @param req - Request
* @param res - Response
* @param compiledType - Ajv compiled schema
* @returns boolean
*/
export function validRequestParams(
req: Request,
res: Response,
compiledType: Ajv.ValidateFunction
): boolean {
if (!compiledType(req.params)) {
// TODO: Return a more user-friendly error
res.status(400).json({ errors: compiledType.errors });
return false;
}
return true;
}
// ==========================
// Parameters
@@ -20,21 +64,26 @@ export const BlockLimitParamSchema = Type.Integer({
description: 'Blocks per page',
});
export const TransactionLimitParamSchema = Type.Integer({
minimum: 1,
maximum: pagingQueryLimits[ResourceType.Tx].maxLimit,
default: pagingQueryLimits[ResourceType.Tx].defaultLimit,
title: 'Transaction limit',
description: 'Transactions per page',
});
const BurnBlockHashParamSchema = Type.RegExp(/^(0x)?[a-fA-F0-9]{64}$/i, {
title: 'Burn block hash',
description: 'Burn block hash',
examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
});
type BurnBlockHashParam = Static<typeof BurnBlockHashParamSchema>;
export const CompiledBurnBlockHashParam = TypeCompiler.Compile(BurnBlockHashParamSchema);
export const CompiledBurnBlockHashParam = ajv.compile(BurnBlockHashParamSchema);
const BurnBlockHeightParamSchema = Type.RegExp(/^[0-9]+$/, {
title: 'Burn block height',
description: 'Burn block height',
examples: ['777678'],
});
type BurnBlockHeightParam = Static<typeof BurnBlockHeightParamSchema>;
const CompiledBurnBlockHeightParam = TypeCompiler.Compile(BurnBlockHeightParamSchema);
// ==========================
// Query and path params
@@ -52,7 +101,17 @@ const PaginationQueryParamsSchema = <T extends TSchema>(t: T) =>
const BlockPaginationQueryParamsSchema = PaginationQueryParamsSchema(BlockLimitParamSchema);
export type BlockPaginationQueryParams = Static<typeof BlockPaginationQueryParamsSchema>;
export const CompiledBlockPaginationParams = TypeCompiler.Compile(BlockPaginationQueryParamsSchema);
export const CompiledBlockPaginationQueryParams = ajv.compile(BlockPaginationQueryParamsSchema);
const TransactionPaginationQueryParamsSchema = PaginationQueryParamsSchema(
TransactionLimitParamSchema
);
export type TransactionPaginationQueryParams = Static<
typeof TransactionPaginationQueryParamsSchema
>;
export const CompiledTransactionPaginationQueryParams = ajv.compile(
TransactionPaginationQueryParamsSchema
);
const BlocksQueryParamsSchema = Type.Union([
BlockPaginationQueryParamsSchema,
@@ -76,9 +135,9 @@ const BlocksQueryParamsSchema = Type.Union([
),
]);
export type BlocksQueryParams = Static<typeof BlocksQueryParamsSchema>;
export const CompiledBlocksQueryParams = TypeCompiler.Compile(BlocksQueryParamsSchema);
export const CompiledBlocksQueryParams = ajv.compile(BlocksQueryParamsSchema);
const BurnBlockParamsSchema = Type.Object(
const BlockParamsSchema = Type.Object(
{
height_or_hash: Type.Union([
Type.Literal('latest'),
@@ -88,5 +147,5 @@ const BurnBlockParamsSchema = Type.Object(
},
{ additionalProperties: false }
);
export type BurnBlockParams = Static<typeof BurnBlockParamsSchema>;
export const CompiledBurnBlockParams = TypeCompiler.Compile(BurnBlockParamsSchema);
export type BlockParams = Static<typeof BlockParamsSchema>;
export const CompiledBlockParams = ajv.compile(BlockParamsSchema);

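The substantive change in this file is swapping TypeBox's TypeCompiler for Ajv with coerceTypes: true. Express hands query and path parameters to handlers as strings, and Ajv's coercion converts them in place so integer schemas such as the limit params validate. A minimal sketch of that behavior (default-import form shown for brevity; the file above uses import * as Ajv):

import Ajv from 'ajv';
import { Type } from '@sinclair/typebox';

// Sketch: Ajv's coerceTypes mutates string inputs into the schema's types,
// which TypeBox's TypeCompiler (a strict checker) does not do.
const ajv = new Ajv({ coerceTypes: true });
const validate = ajv.compile(Type.Object({ limit: Type.Integer({ maximum: 200 }) }));

const query: Record<string, unknown> = { limit: '20' }; // how Express parses ?limit=20
console.log(validate(query)); // true
console.log(query.limit);     // 20 (now a number, coerced in place)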
View File

@@ -23,6 +23,7 @@ export interface DbBlock {
execution_cost_runtime: number;
execution_cost_write_count: number;
execution_cost_write_length: number;
tx_count: number;
}
/** An interface representing the microblock data that can be constructed _only_ from the /new_microblocks payload */
@@ -571,18 +572,6 @@ export interface NftEventWithTxMetadata {
tx?: DbTx;
}
export interface AddressNftEventIdentifier {
sender: string;
recipient: string;
asset_identifier: string;
value: string;
block_height: number;
tx_id: string;
event_index: number;
tx_index: number;
asset_event_type_id: number;
}
export interface DataStoreBlockUpdateData {
block: DbBlock;
microblocks: DbMicroblock[];
@@ -806,6 +795,7 @@ export interface BlockQueryResult {
execution_cost_runtime: string;
execution_cost_write_count: string;
execution_cost_write_length: string;
tx_count: number;
}
export interface MicroblockQueryResult {
@@ -1037,10 +1027,6 @@ export type DbPaginatedResult<T> = {
results: T[];
};
export type BlockWithTransactionIds = DbBlock & {
tx_ids: string[];
};
export interface BlocksWithMetadata {
results: {
block: DbBlock;
@@ -1174,6 +1160,7 @@ export interface BlockInsertValues {
execution_cost_runtime: number;
execution_cost_write_count: number;
execution_cost_write_length: number;
tx_count: number;
}
export interface MicroblockInsertValues {

View File

@@ -183,6 +183,7 @@ export const BLOCK_COLUMNS = [
'execution_cost_runtime',
'execution_cost_write_count',
'execution_cost_write_length',
'tx_count',
];
export const MICROBLOCK_COLUMNS = [
@@ -464,6 +465,7 @@ export function parseBlockQueryResult(row: BlockQueryResult): DbBlock {
execution_cost_runtime: Number.parseInt(row.execution_cost_runtime),
execution_cost_write_count: Number.parseInt(row.execution_cost_write_count),
execution_cost_write_length: Number.parseInt(row.execution_cost_write_length),
tx_count: row.tx_count,
};
return block;
}

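One inference worth noting about the parsing above: the execution-cost columns go through Number.parseInt because the driver returns bigint (int8) columns as strings, while tx_count was declared as a plain int in the migration, so row.tx_count already arrives as a JavaScript number and is assigned directly.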
View File

@@ -19,10 +19,8 @@ import {
} from '../helpers';
import { PgStoreEventEmitter } from './pg-store-event-emitter';
import {
AddressNftEventIdentifier,
BlockIdentifier,
BlockQueryResult,
BlockWithTransactionIds,
BlocksWithMetadata,
ContractTxQueryResult,
DbAssetEventTypeId,
@@ -73,6 +71,7 @@ import {
PoxSyntheticEventTable,
DbPoxStacker,
DbPoxSyntheticEvent,
TxQueryResult,
} from './common';
import {
abiColumn,
@@ -108,9 +107,12 @@ import {
BlockLimitParamSchema,
BlockPaginationQueryParams,
BlocksQueryParams,
BurnBlockParams,
BlockParams,
TransactionPaginationQueryParams,
TransactionLimitParamSchema,
CompiledBurnBlockHashParam,
} from '../api/routes/v2/schemas';
import { InvalidRequestError, InvalidRequestErrorType } from '../errors';
export const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations');
@@ -389,7 +391,7 @@ export class PgStore extends BasePgStore {
async getCurrentBlockInternal(sql: PgSqlClient): Promise<FoundOrNot<DbBlock>> {
const result = await sql<BlockQueryResult[]>`
SELECT ${sql(BLOCK_COLUMNS)}
SELECT ${sql(BLOCK_COLUMNS.map(c => `b.${c}`))}
FROM blocks b
INNER JOIN chain_tip t USING (index_block_hash, block_hash, block_height, burn_block_height)
LIMIT 1
@@ -436,12 +438,12 @@ export class PgStore extends BasePgStore {
});
}
async getBurnBlock(args: BurnBlockParams): Promise<DbBurnBlock | undefined> {
async getBurnBlock(args: BlockParams): Promise<DbBurnBlock | undefined> {
return await this.sqlTransaction(async sql => {
const filter =
args.height_or_hash === 'latest'
? sql`burn_block_hash = (SELECT burn_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
: CompiledBurnBlockHashParam.Check(args.height_or_hash)
: CompiledBurnBlockHashParam(args.height_or_hash)
? sql`burn_block_hash = ${args.height_or_hash}`
: sql`burn_block_height = ${args.height_or_hash}`;
const blockQuery = await sql<DbBurnBlock[]>`
@@ -573,9 +575,9 @@ export class PgStore extends BasePgStore {
/**
* Returns block information
* @returns Paginated `BlockWithTransactionIds` array
* @returns Paginated `DbBlock` array
*/
async getV2Blocks(args: BlocksQueryParams): Promise<DbPaginatedResult<BlockWithTransactionIds>> {
async getV2Blocks(args: BlocksQueryParams): Promise<DbPaginatedResult<DbBlock>> {
return await this.sqlTransaction(async sql => {
const limit = args.limit ?? BlockLimitParamSchema.default;
const offset = args.offset ?? 0;
@@ -597,7 +599,7 @@ export class PgStore extends BasePgStore {
: undefined;
// Obtain blocks and transaction counts in the same query.
const blocksQuery = await sql<(BlockQueryResult & { tx_ids: string; total: number })[]>`
const blocksQuery = await sql<(BlockQueryResult & { total: number })[]>`
WITH block_count AS (
${
'burn_block_hash' in args
@@ -609,13 +611,6 @@ export class PgStore extends BasePgStore {
)
SELECT
${sql(BLOCK_COLUMNS)},
(
SELECT STRING_AGG(tx_id,',')
FROM txs
WHERE index_block_hash = blocks.index_block_hash
AND canonical = true
AND microblock_canonical = true
) AS tx_ids,
(SELECT count FROM block_count)::int AS total
FROM blocks
WHERE canonical = true
@@ -637,10 +632,7 @@ export class PgStore extends BasePgStore {
results: [],
total: 0,
};
const blocks = blocksQuery.map(b => ({
...parseBlockQueryResult(b),
tx_ids: b.tx_ids ? b.tx_ids.split(',') : [],
}));
const blocks = blocksQuery.map(b => parseBlockQueryResult(b));
return {
limit,
offset,
@@ -650,39 +642,77 @@ export class PgStore extends BasePgStore {
});
}
async getV2Block(args: BurnBlockParams): Promise<BlockWithTransactionIds | undefined> {
async getV2Block(args: BlockParams): Promise<DbBlock | undefined> {
return await this.sqlTransaction(async sql => {
const filter =
args.height_or_hash === 'latest'
? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
: CompiledBurnBlockHashParam.Check(args.height_or_hash)
: CompiledBurnBlockHashParam(args.height_or_hash)
? sql`(
block_hash = ${normalizeHashString(args.height_or_hash)}
OR index_block_hash = ${normalizeHashString(args.height_or_hash)}
)`
: sql`block_height = ${args.height_or_hash}`;
const blockQuery = await sql<(BlockQueryResult & { tx_ids: string })[]>`
SELECT
${sql(BLOCK_COLUMNS)},
(
SELECT STRING_AGG(tx_id,',')
FROM txs
WHERE index_block_hash = blocks.index_block_hash
AND canonical = true
AND microblock_canonical = true
) AS tx_ids
const blockQuery = await sql<BlockQueryResult[]>`
SELECT ${sql(BLOCK_COLUMNS)}
FROM blocks
WHERE canonical = true AND ${filter}
LIMIT 1
`;
if (blockQuery.count > 0)
return {
...parseBlockQueryResult(blockQuery[0]),
tx_ids: blockQuery[0].tx_ids ? blockQuery[0].tx_ids.split(',') : [],
};
if (blockQuery.count > 0) return parseBlockQueryResult(blockQuery[0]);
});
}
async getV2BlockTransactions(
args: BlockParams & TransactionPaginationQueryParams
): Promise<DbPaginatedResult<DbTx>> {
return await this.sqlTransaction(async sql => {
const limit = args.limit ?? TransactionLimitParamSchema.default;
const offset = args.offset ?? 0;
const filter =
args.height_or_hash === 'latest'
? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
: CompiledBurnBlockHashParam(args.height_or_hash)
? sql`(
block_hash = ${normalizeHashString(args.height_or_hash)}
OR index_block_hash = ${normalizeHashString(args.height_or_hash)}
)`
: sql`block_height = ${args.height_or_hash}`;
const blockCheck = await sql`SELECT index_block_hash FROM blocks WHERE ${filter} LIMIT 1`;
if (blockCheck.count === 0)
throw new InvalidRequestError(`Block not found`, InvalidRequestErrorType.invalid_param);
const txsQuery = await sql<(TxQueryResult & { total: number })[]>`
WITH tx_count AS (
SELECT tx_count AS total FROM blocks WHERE canonical = TRUE AND ${filter}
)
SELECT ${sql(TX_COLUMNS)}, (SELECT total FROM tx_count)::int AS total
FROM txs
WHERE canonical = true
AND microblock_canonical = true
AND ${filter}
ORDER BY microblock_sequence ASC, tx_index ASC
LIMIT ${limit}
OFFSET ${offset}
`;
if (txsQuery.count === 0)
return {
limit,
offset,
results: [],
total: 0,
};
return {
limit,
offset,
results: txsQuery.map(t => parseTxQueryResult(t)),
total: txsQuery[0].total,
};
});
}
/**
* @deprecated Only used in tests
*/
async getBlockTxs(indexBlockHash: string) {
const result = await this.sql<{ tx_id: string; tx_index: number }[]>`
SELECT tx_id, tx_index

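A design note on getV2BlockTransactions above: the pagination total comes from the new blocks.tx_count column via the tx_count CTE, a single-row read, rather than a COUNT(*) over txs on every page request. A hedged sketch of a client walking the pages, reusing the getBlockTransactions helper sketched earlier:

// Sketch: collect every transaction ID in a block by paging until `total` is reached.
async function getAllBlockTxIds(heightOrHash: string): Promise<string[]> {
  const ids: string[] = [];
  const limit = 50;
  for (let offset = 0; ; offset += limit) {
    const page = await getBlockTransactions(heightOrHash, limit, offset);
    if (!page) throw new Error('block not found'); // the endpoint returned 404
    ids.push(...page.results.map(tx => tx.tx_id));
    if (offset + limit >= page.total) break;
  }
  return ids;
}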
View File

@@ -444,6 +444,7 @@ export class PgWriteStore extends PgStore {
execution_cost_runtime: block.execution_cost_runtime,
execution_cost_write_count: block.execution_cost_write_count,
execution_cost_write_length: block.execution_cost_write_length,
tx_count: block.tx_count,
};
const result = await sql`
INSERT INTO blocks ${sql(values)}
@@ -2791,6 +2792,7 @@ export class PgWriteStore extends PgStore {
execution_cost_runtime: block.execution_cost_runtime,
execution_cost_write_count: block.execution_cost_write_count,
execution_cost_write_length: block.execution_cost_write_length,
tx_count: block.tx_count,
}));
await sql`
INSERT INTO blocks ${sql(values)}

View File

@@ -259,6 +259,7 @@ async function handleBlockMessage(
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: msg.transactions.length,
};
logger.debug(`Received block ${msg.block_hash} (${msg.block_height}) from node`, dbBlock);
@@ -1064,6 +1065,7 @@ export function parseNewBlockMessage(chainId: ChainID, msg: CoreNodeBlockMessage
execution_cost_runtime: totalCost.execution_cost_runtime,
execution_cost_write_count: totalCost.execution_cost_write_count,
execution_cost_write_length: totalCost.execution_cost_write_length,
tx_count: msg.transactions.length,
};
const dbMinerRewards: DbMinerReward[] = [];

View File

@@ -122,6 +122,7 @@ function testBlock(args?: TestBlockArgs): DbBlock {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
}
@@ -743,7 +744,9 @@ export class TestBlockBuilder {
}
build(): DataStoreBlockUpdateData {
return this.data;
const data = this.data;
data.block.tx_count = this.txIndex + 1;
return data;
}
}

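With the build() change above, TestBlockBuilder now stamps tx_count from the number of transactions added to the anchor block, so fixtures built through the builder stay consistent without setting the field manually. A hedged usage sketch (argument shapes abbreviated; assumes txIndex tracks the index of the last tx added):

// Sketch: tx_count is derived at build() time from the txs added via the builder.
const data = new TestBlockBuilder({ block_height: 2, index_block_hash: '0x02' })
  .addTx({ tx_id: '0x0201' })
  .addTx({ tx_id: '0x0202' })
  .build();
// data.block.tx_count === 2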
View File

@@ -89,6 +89,7 @@ describe('address tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
let indexIdIndex = 0;
const createStxTx = (
@@ -867,6 +868,7 @@ describe('address tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
let indexIdIndex = 0;
@@ -2038,6 +2040,7 @@ describe('address tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const txBuilder = await makeContractCall({
contractAddress: 'ST11NJTTKGVT6D1HY4NJRVQWMQM7TVAR091EJ8P2Y',

View File

@@ -83,6 +83,7 @@ describe('block tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, block);
const tx: DbTxRaw = {
@@ -519,6 +520,7 @@ describe('block tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const dbTx1: DbTxRaw = {
...dbBlock,
@@ -673,7 +675,7 @@ describe('block tests', () => {
miner_txid: '0x4321',
parent_block_hash: '0x0004',
parent_index_block_hash: '0x0004',
txs: ['0x0005'],
tx_count: 1,
};
let fetch = await supertest(api.server).get(
`/extended/v2/blocks?burn_block_hash=00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8`
@@ -708,7 +710,7 @@ describe('block tests', () => {
miner_txid: '0x4321',
parent_block_hash: '0x0007',
parent_index_block_hash: '0x0007',
txs: ['0x0018'],
tx_count: 1,
};
fetch = await supertest(api.server).get(`/extended/v2/blocks?burn_block_hash=latest`);
json = JSON.parse(fetch.text);
@@ -770,7 +772,7 @@ describe('block tests', () => {
miner_txid: '0x4321',
parent_block_hash: '0x0000000000000000000000000000000000000000000000000000000000000004',
parent_index_block_hash: '0x0000000000000000000000000000000000000000000000000000000000000114',
txs: ['0x0005'],
tx_count: 1,
};
let fetch = await supertest(api.server).get(`/extended/v2/blocks/latest`);
let json = JSON.parse(fetch.text);

View File

@@ -95,6 +95,7 @@ describe('cache-control tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTxRaw = {
tx_id: '0x1234',

View File

@@ -273,6 +273,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTxRaw = {
tx_id: '0x1234',
@@ -436,6 +437,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTxRaw = {
tx_id: '0x1234',
@@ -605,6 +607,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, block);
const blockQuery = await db.getBlock({ hash: block.block_hash });
@@ -670,6 +673,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
let indexIdIndex = 0;
@@ -929,6 +933,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const txs1 = [
createStxTx('addrA', 'addrB', 100, dbBlock1),
@@ -1001,6 +1006,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {
tx_id: '0x1234',
@@ -1966,6 +1972,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTx = {
tx_id: '0x1234',
@@ -2045,6 +2052,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTx = {
tx_id: '0x421234',
@@ -2129,6 +2137,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTx = {
tx_id: '0x421234',
@@ -2221,6 +2230,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTxRaw = {
tx_id: '0x421234',
@@ -2355,6 +2365,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTx = {
tx_id: '0x421234',
@@ -2439,6 +2450,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTx = {
tx_id: '0x421234',
@@ -2522,6 +2534,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTx = {
tx_id: '0x421234',
@@ -2604,6 +2617,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, dbBlock);
@@ -2673,6 +2687,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTx = {
tx_id: '0x421234',
@@ -3042,6 +3057,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block2: DbBlock = {
block_hash: '0x22',
@@ -3061,6 +3077,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block3: DbBlock = {
block_hash: '0x33',
@@ -3080,6 +3097,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block3B: DbBlock = {
...block3,
@@ -3105,6 +3123,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block4: DbBlock = {
block_hash: '0x44',
@@ -3124,6 +3143,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block5: DbBlock = {
block_hash: '0x55',
@@ -3143,6 +3163,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block6: DbBlock = {
block_hash: '0x66',
@@ -3162,6 +3183,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1Mempool: DbMempoolTxRaw = {
@@ -3349,6 +3371,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block2: DbBlock = {
block_hash: '0x22',
@@ -3368,6 +3391,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block3: DbBlock = {
block_hash: '0x33',
@@ -3387,6 +3411,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block3B: DbBlock = {
...block3,
@@ -3412,6 +3437,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const minerReward1: DbMinerReward = {
@@ -3540,6 +3566,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const reorgResult = await db.handleReorg(client, block5, 0);
@@ -3611,6 +3638,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const block2: DbBlock = {
block_hash: '0x22',
@@ -3630,6 +3658,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const minerReward1: DbMinerReward = {
@@ -3903,6 +3932,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({ block: block3, microblocks: [], minerRewards: [], txs: [] });
@@ -3924,6 +3954,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx3: DbTxRaw = {
tx_id: '0x03',
@@ -4129,6 +4160,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({ block: block3b, microblocks: [], minerRewards: [], txs: [] });
const blockQuery2 = await db.getBlock({ hash: block3b.block_hash });
@@ -4166,6 +4198,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({ block: block4b, microblocks: [], minerRewards: [], txs: [] });
@@ -4262,6 +4295,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {
tx_id: '0x421234',
@@ -4344,6 +4378,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {
tx_id: '0x421234',
@@ -4425,6 +4460,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {
tx_id: '0x421234',
@@ -4577,6 +4613,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({
block: dbBlock,
@@ -4636,6 +4673,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({
block: dbBlock,
@@ -4696,6 +4734,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({
block: dbBlock,
@@ -4756,6 +4795,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, block);
const blockQuery = await db.getBlock({ hash: block.block_hash });
@@ -4859,6 +4899,7 @@ describe('postgres datastore', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, block);
const blockQuery = await db.getBlock({ hash: block.block_hash });

View File

@@ -500,6 +500,7 @@ describe('mempool tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const dbTx1: DbTxRaw = {
...mempoolTx1,
@@ -1190,6 +1191,7 @@ describe('mempool tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, dbBlock);
const senderAddress = 'SP25YGP221F01S9SSCGN114MKDAK9VRK8P3KXGEMB';
@@ -1262,6 +1264,7 @@ describe('mempool tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, dbBlock);
const senderAddress = 'SP25YGP221F01S9SSCGN114MKDAK9VRK8P3KXGEMB';
@@ -1480,6 +1483,7 @@ describe('mempool tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const dbBlock2: DbBlock = {
block_hash: '0x2123',
@@ -1499,6 +1503,7 @@ describe('mempool tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const mempoolTx: DbMempoolTxRaw = {
tx_id: txId,

View File

@@ -282,6 +282,7 @@ describe('microblock tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {

View File

@@ -60,6 +60,7 @@ describe('other tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTxRaw = {
tx_id: '0x1234',

View File

@@ -60,6 +60,7 @@ describe('search tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, block);
const tx: DbTxRaw = {
@@ -267,6 +268,7 @@ describe('search tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx: DbTxRaw = {
@@ -597,6 +599,7 @@ describe('search tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.updateBlock(client, block);
@@ -1033,6 +1036,7 @@ describe('search tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const stxTx1: DbTxRaw = {

View File

@@ -60,6 +60,7 @@ describe('smart contract tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {
tx_id: '0x421234',
@@ -208,6 +209,7 @@ describe('smart contract tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const txId1 = '0x421234';
const smartContract1: DbSmartContract = {
@@ -315,6 +317,7 @@ describe('smart contract tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const txId1 = '0x421234';
const smartContract1: DbSmartContract = {
@@ -420,6 +423,7 @@ describe('smart contract tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
const tx1: DbTxRaw = {
tx_id: '0x421235',

View File

@@ -145,6 +145,7 @@ describe('tx tests', () => {
burn_block_hash: '0x0000000000000000000342c6f7e9313ffa6f0a92618edaf86351ca265aee1c7a',
burn_block_height: 1,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 1210,
execution_cost_read_length: 1919542,
@@ -328,6 +329,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -483,6 +485,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -621,6 +624,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -759,6 +763,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -949,6 +954,7 @@ describe('tx tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
await db.update({
block: dbBlock,
@@ -1135,6 +1141,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -1333,6 +1340,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -1578,6 +1586,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -1723,6 +1732,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -1868,6 +1878,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -1983,6 +1994,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -2634,6 +2646,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -2879,6 +2892,7 @@ describe('tx tests', () => {
burn_block_hash: '0x0000000000000000000342c6f7e9313ffa6f0a92618edaf86351ca265aee1c7a',
burn_block_height: 1,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 1210,
execution_cost_read_length: 1919542,
@@ -3208,6 +3222,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,
@@ -3283,6 +3298,110 @@ describe('tx tests', () => {
expect(result1.body.results[0].index_block_hash).toBe('0xdeadbeef');
});
test('fetch transactions from v2 block', async () => {
await db.update(
new TestBlockBuilder({
block_hash: '0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8',
index_block_hash: '0xdeadbeef',
parent_index_block_hash: '0x00',
parent_block_hash: '0xff0011',
parent_microblock_hash: '',
parent_microblock_sequence: 0,
block_height: 1,
burn_block_time: 94869286,
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
canonical: true,
})
.addTx({
tx_id: '0x1234',
tx_index: 0,
nonce: 0,
type_id: DbTxTypeId.Coinbase,
status: 1,
raw_result: '0x0100000000000000000000000000000001', // u1
canonical: true,
microblock_canonical: true,
microblock_sequence: I32_MAX,
microblock_hash: '',
fee_rate: 1234n,
sender_address: 'sender-addr',
})
.addTx({
tx_id: '0x1235',
tx_index: 1,
nonce: 0,
type_id: DbTxTypeId.Coinbase,
status: 1,
raw_result: '0x0100000000000000000000000000000001', // u1
canonical: true,
microblock_canonical: true,
microblock_sequence: I32_MAX,
microblock_hash: '',
fee_rate: 1234n,
sender_address: 'sender-addr',
})
.build()
);
let result = await supertest(api.server).get(
`/extended/v2/blocks/0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8/transactions?limit=20&offset=0`
);
expect(result.status).toBe(200);
expect(result.type).toBe('application/json');
let json = JSON.parse(result.text);
expect(json.total).toBe(2);
expect(json.results[0]).toStrictEqual({
anchor_mode: 'any',
block_hash: '0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee5ca3d8',
block_height: 1,
burn_block_time: 94869286,
burn_block_time_iso: '1973-01-03T00:34:46.000Z',
canonical: true,
coinbase_payload: {
alt_recipient: null,
data: '0x6869',
},
event_count: 0,
events: [],
execution_cost_read_count: 0,
execution_cost_read_length: 0,
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
fee_rate: '1234',
is_unanchored: false,
microblock_canonical: true,
microblock_hash: '0x',
microblock_sequence: 2147483647,
nonce: 0,
parent_block_hash: '0x123456',
parent_burn_block_time: 94869286,
parent_burn_block_time_iso: '1973-01-03T00:34:46.000Z',
post_condition_mode: 'allow',
post_conditions: [],
sender_address: 'sender-addr',
sponsored: false,
tx_id: '0x1234',
tx_index: 0,
tx_result: {
hex: '0x0100000000000000000000000000000001',
repr: 'u1',
},
tx_status: 'success',
tx_type: 'coinbase',
});
// Try a non-existent block
result = await supertest(api.server).get(
`/extended/v2/blocks/0x00000000000000000001e2ee7f0c6bd5361b5e7afd76156ca7d6f524ee999999/transactions?limit=20&offset=0`
);
expect(result.status).toBe(404);
expect(result.type).toBe('application/json');
json = JSON.parse(result.text);
expect(json.errors).toBe('Block not found');
});
test('fetch transactions from block', async () => {
const not_updated_tx_id = '0x1111';
const tx_not_found = {
@@ -3300,6 +3419,7 @@ describe('tx tests', () => {
burn_block_hash: '0x1234',
burn_block_height: 123,
miner_txid: '0x4321',
tx_count: 1,
canonical: true,
execution_cost_read_count: 0,
execution_cost_read_length: 0,

View File

@@ -74,6 +74,7 @@ describe('v2-proxy tests', () => {
execution_cost_runtime: 0,
execution_cost_write_count: 0,
execution_cost_write_length: 0,
tx_count: 1,
};
// Ensure db has a block so that current block height queries return a found result