fix: tx pagination ordering bug #924 (#933)

Matthew Little
2021-12-22 19:54:58 +01:00
committed by GitHub
parent aab33a156a
commit d6587fda02
7 changed files with 166 additions and 52 deletions
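
For context on the diff below: the old block-tx queries ran LIMIT/OFFSET without an ORDER BY, so Postgres could return rows in any order and pages could overlap or skip transactions. The fix adds an explicit ORDER BY microblock_sequence DESC, tx_index DESC and changes getTxsFromBlock to take a BlockIdentifier and return a FoundOrNot result. As a rough illustration only (not part of this commit; TxOrderKey and compareTxsNewestFirst are made-up names), the imposed order corresponds to this TypeScript comparator:

// Sketch only: mirrors the sort order the new ORDER BY clause enforces.
// With a deterministic total order, every limit/offset page is cut from
// the same sequence, so pages cannot overlap or drop transactions.
interface TxOrderKey {
  microblock_sequence: number;
  tx_index: number;
}

function compareTxsNewestFirst(a: TxOrderKey, b: TxOrderKey): number {
  if (a.microblock_sequence !== b.microblock_sequence) {
    return b.microblock_sequence - a.microblock_sequence; // later microblock first
  }
  return b.tx_index - a.tx_index; // then higher tx_index first
}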

View File

@@ -72,8 +72,6 @@ import {
import { readClarityValueArray, readTransactionPostConditions } from '../../p2p/tx';
import { serializePostCondition, serializePostConditionMode } from '../serializers/post-conditions';
import { getOperations, parseTransactionMemo, processUnlockingEvents } from '../../rosetta-helpers';
-import { any } from 'bluebird';
-import { push } from 'docker-compose';
export function parseTxTypeStrings(values: string[]): TransactionType[] {
return values.map(v => {

View File

@@ -1,6 +1,5 @@
import * as express from 'express';
import { asyncHandler } from '../async-handler';
-import * as Bluebird from 'bluebird';
import { DataStore, DbTx, DbMempoolTx } from '../../datastore/common';
import {
getTxFromDataStore,
@@ -301,27 +300,19 @@ export function createTxRouter(db: DataStore): express.Router {
const { block_hash } = req.params;
const limit = parseTxQueryEventsLimit(req.query['limit'] ?? 96);
const offset = parsePagingQueryInput(req.query['offset'] ?? 0);
-// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
-const dbTxs = await db.getTxsFromBlock(block_hash, limit, offset);
-const results = await Bluebird.mapSeries(dbTxs.results, async tx => {
-const txQuery = await getTxFromDataStore(db, {
-txId: tx.tx_id,
-dbTx: tx,
-includeUnanchored: true,
-});
-if (!txQuery.found) {
-throw new Error('unexpected tx not found -- fix tx enumeration query');
-}
-return txQuery.result;
-});
+const result = await db.getTxsFromBlock({ hash: block_hash }, limit, offset);
+if (!result.found) {
+res.status(404).json({ error: `no block found by hash ${block_hash}` });
+return;
+}
+const dbTxs = result.result;
+const results = dbTxs.results.map(dbTx => parseDbTx(dbTx));
const response: TransactionResults = {
limit: limit,
offset: offset,
-results: results,
total: dbTxs.total,
+results: results,
};
if (!isProdEnv) {
const schemaPath =
@@ -338,31 +329,19 @@ export function createTxRouter(db: DataStore): express.Router {
const height = getBlockHeightPathParam(req, res, next);
const limit = parseTxQueryEventsLimit(req.query['limit'] ?? 96);
const offset = parsePagingQueryInput(req.query['offset'] ?? 0);
-// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
-const blockHash = await db.getBlock({ height: height });
-if (!blockHash.found) {
+const result = await db.getTxsFromBlock({ height: height }, limit, offset);
+if (!result.found) {
res.status(404).json({ error: `no block found at height ${height}` });
return;
}
-const dbTxs = await db.getTxsFromBlock(blockHash.result.block_hash, limit, offset);
-const results = await Bluebird.mapSeries(dbTxs.results, async tx => {
-const txQuery = await getTxFromDataStore(db, {
-txId: tx.tx_id,
-dbTx: tx,
-includeUnanchored: true,
-});
-if (!txQuery.found) {
-throw new Error('unexpected tx not found -- fix tx enumeration query');
-}
-return txQuery.result;
-});
+const dbTxs = result.result;
+const results = dbTxs.results.map(dbTx => parseDbTx(dbTx));
const response: TransactionResults = {
limit: limit,
offset: offset,
-results: results,
total: dbTxs.total,
+results: results,
};
if (!isProdEnv) {
const schemaPath =

View File

@@ -612,10 +612,10 @@ export interface DataStore extends DataStoreEventEmitter {
getBlockTxs(indexBlockHash: string): Promise<{ results: string[] }>;
getBlockTxsRows(blockHash: string): Promise<FoundOrNot<DbTx[]>>;
getTxsFromBlock(
-blockHash: string,
+blockIdentifer: BlockIdentifier,
limit: number,
offset: number
-): Promise<{ results: DbTx[]; total: number }>;
+): Promise<FoundOrNot<{ results: DbTx[]; total: number }>>;
getMempoolTxs(args: {
txIds: string[];

View File

@@ -710,10 +710,10 @@ export class MemoryDataStore
}
getTxsFromBlock(
-blockHash: string,
+blockIdentifer: BlockIdentifier,
limit: number,
offset: number
-): Promise<{ results: DbTx[]; total: number }> {
+): Promise<FoundOrNot<{ results: DbTx[]; total: number }>> {
throw new Error('Method not implemented');
}
getMinersRewardsAtHeight({ blockHeight }: { blockHeight: number }): Promise<DbMinerReward[]> {

View File

@@ -2974,11 +2974,15 @@ export class PgDataStore
});
}
-async getTxsFromBlock(blockHash: string, limit: number, offset: number) {
+async getTxsFromBlock(
+blockIdentifer: BlockIdentifier,
+limit: number,
+offset: number
+): Promise<FoundOrNot<{ results: DbTx[]; total: number }>> {
return this.queryTx(async client => {
-const blockQuery = await this.getBlockInternal(client, { hash: blockHash });
+const blockQuery = await this.getBlockInternal(client, blockIdentifer);
if (!blockQuery.found) {
-throw new Error(`Could not find block by hash ${blockHash}`);
+return { found: false };
}
const totalQuery = await client.query<{ count: number }>(
`
@@ -2993,6 +2997,7 @@ export class PgDataStore
SELECT ${TX_COLUMNS}, ${abiColumn()}
FROM txs
WHERE canonical = true AND microblock_canonical = true AND index_block_hash = $1
+ORDER BY microblock_sequence DESC, tx_index DESC
LIMIT $2
OFFSET $3
`,
@@ -3000,7 +3005,7 @@ export class PgDataStore
);
const total = totalQuery.rowCount > 0 ? totalQuery.rows[0].count : 0;
const parsed = result.rows.map(r => this.parseTxQueryResult(r));
-return { results: parsed, total };
+return { found: true, result: { results: parsed, total } };
});
}
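
With the new signature, callers pass a BlockIdentifier ({ hash } or { height }) and must check found before reading the page. A minimal usage sketch (assuming db is a connected PgDataStore instance and an arbitrary block height):

const page = await db.getTxsFromBlock({ height: 100 }, 25, 0);
if (!page.found) {
  // Block not found; the tx router turns this into a 404 response.
} else {
  console.log(`total=${page.result.total} page=${page.result.results.length}`);
}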

View File

@@ -1,4 +1,5 @@
import * as supertest from 'supertest';
+import * as assert from 'assert';
import {
makeContractCall,
NonFungibleConditionCode,
@@ -5083,8 +5084,12 @@ describe('api tests', () => {
expect(searchResult8.type).toBe('application/json');
expect(JSON.parse(searchResult8.text).result.metadata).toEqual(contractCallExpectedResults);
-const blockTxResult = await db.getTxsFromBlock('0x1234', 20, 0);
-expect(blockTxResult.results).toContainEqual({ ...contractCall, ...{ abi: contractJsonAbi } });
+const blockTxResult = await db.getTxsFromBlock({ hash: '0x1234' }, 20, 0);
+assert(blockTxResult.found);
+expect(blockTxResult.result.results).toContainEqual({
+...contractCall,
+...{ abi: contractJsonAbi },
+});
});
test('list contract log events', async () => {
@@ -8593,6 +8598,131 @@ describe('api tests', () => {
expect(result4.body.results.length).toBe(0);
});
+test('paginate transactions by block', async () => {
+let blockBuilder1 = new TestBlockBuilder();
+for (let i = 0; i < 12; i++) {
+blockBuilder1 = blockBuilder1.addTx({
+tx_index: i,
+tx_id: `0x00${i.toString().padStart(2, '0')}`,
+});
+}
+const block1 = blockBuilder1.build();
+// move around some tx insert orders
+const tx1 = block1.txs[1];
+const tx2 = block1.txs[5];
+const tx3 = block1.txs[10];
+const tx4 = block1.txs[11];
+block1.txs[1] = tx4;
+block1.txs[5] = tx3;
+block1.txs[10] = tx2;
+block1.txs[11] = tx1;
+await db.update(block1);
+// Insert some duplicated, non-canonical txs to ensure they don't cause issues with
+// returned tx list or pagination ordering.
+const nonCanonicalTx1: DbTx = { ...tx1.tx, canonical: false, microblock_hash: '0xaa' };
+await db.updateTx(client, nonCanonicalTx1);
+const nonCanonicalTx2: DbTx = {
+...tx2.tx,
+microblock_canonical: false,
+microblock_hash: '0xbb',
+};
+await db.updateTx(client, nonCanonicalTx2);
+const result1 = await supertest(api.server).get(
+`/extended/v1/tx/block_height/${block1.block.block_height}?limit=4&offset=0`
+);
+expect(result1.status).toBe(200);
+expect(result1.type).toBe('application/json');
+expect(result1.body).toEqual(
+expect.objectContaining({
+total: 12,
+limit: 4,
+offset: 0,
+results: expect.arrayContaining([
+expect.objectContaining({
+tx_id: '0x0011',
+tx_index: 11,
+}),
+expect.objectContaining({
+tx_id: '0x0010',
+tx_index: 10,
+}),
+expect.objectContaining({
+tx_id: '0x0009',
+tx_index: 9,
+}),
+expect.objectContaining({
+tx_id: '0x0008',
+tx_index: 8,
+}),
+]),
+})
+);
+const result2 = await supertest(api.server).get(
+`/extended/v1/tx/block_height/${block1.block.block_height}?limit=4&offset=4`
+);
+expect(result2.status).toBe(200);
+expect(result2.type).toBe('application/json');
+expect(result2.body).toEqual(
+expect.objectContaining({
+total: 12,
+limit: 4,
+offset: 4,
+results: expect.arrayContaining([
+expect.objectContaining({
+tx_id: '0x0007',
+tx_index: 7,
+}),
+expect.objectContaining({
+tx_id: '0x0006',
+tx_index: 6,
+}),
+expect.objectContaining({
+tx_id: '0x0005',
+tx_index: 5,
+}),
+expect.objectContaining({
+tx_id: '0x0004',
+tx_index: 4,
+}),
+]),
+})
+);
+const result3 = await supertest(api.server).get(
+`/extended/v1/tx/block_height/${block1.block.block_height}?limit=4&offset=8`
+);
+expect(result3.status).toBe(200);
+expect(result3.type).toBe('application/json');
+expect(result3.body).toEqual(
+expect.objectContaining({
+total: 12,
+limit: 4,
+offset: 8,
+results: expect.arrayContaining([
+expect.objectContaining({
+tx_id: '0x0003',
+tx_index: 3,
+}),
+expect.objectContaining({
+tx_id: '0x0002',
+tx_index: 2,
+}),
+expect.objectContaining({
+tx_id: '0x0001',
+tx_index: 1,
+}),
+expect.objectContaining({
+tx_id: '0x0000',
+tx_index: 0,
+}),
+]),
+})
+);
+});
test('Get fee rate', async () => {
const request: FeeRateRequest = {
transaction: '0x5e9f3933e358df6a73fec0d47ce3e1062c20812c129f5294e6f37a8d27c051d9',

View File

@@ -4483,9 +4483,10 @@ describe('postgres datastore', () => {
execution_cost_write_length: 0,
};
await db.updateTx(client, tx2);
-const blockTxs = await db.getTxsFromBlock(block.block_hash, 20, 0);
-expect(blockTxs.results.length).toBe(2);
-expect(blockTxs.total).toBe(2);
+const blockTxs = await db.getTxsFromBlock({ hash: block.block_hash }, 20, 0);
+assert(blockTxs.found);
+expect(blockTxs.result.results.length).toBe(2);
+expect(blockTxs.result.total).toBe(2);
});
test('pg get transactions in a block: with limit and offset', async () => {
@@ -4548,9 +4549,10 @@ describe('postgres datastore', () => {
execution_cost_write_length: 0,
};
await db.updateTx(client, tx);
-const blockTxs = await db.getTxsFromBlock(block.block_hash, 20, 6);
-expect(blockTxs.results.length).toBe(0);
-expect(blockTxs.total).toBe(1);
+const blockTxs = await db.getTxsFromBlock({ hash: block.block_hash }, 20, 6);
+assert(blockTxs.found);
+expect(blockTxs.result.results.length).toBe(0);
+expect(blockTxs.result.total).toBe(1);
});
test('pg token offering locked inserted: success', async () => {