fix: sql transaction consistency (#1410)

* feat: savepoint transactions

* refactor: db-controller

* refactor: use transactions in all routes

* test: fix unit tests

* fix: bns test status code

* fix: name redirects

* fix: bns status tests

* fix: rosetta test suite

* ci: security patch

* fix: instead of savepoint, run queries directly

* feat: use async_hooks to check for open transactions

* fix: add usage name to transaction ctx

* feat: pg tx consistency in write store

* feat: store scoped connection in async context

* fix: revert unnecessary changes

* test: transaction consistency

* fix: bns tests

* fix: rosetta status tests

* fix: bns integration tests
This commit is contained in:
Rafael Cárdenas
2022-11-03 14:04:45 -06:00
committed by GitHub
parent 0a552b8d8c
commit 01e26d9c89
20 changed files with 1230 additions and 994 deletions

View File

@@ -300,60 +300,62 @@ export async function getRosettaBlockFromDataStore(
blockHash?: string,
blockHeight?: number
): Promise<FoundOrNot<RosettaBlock>> {
let query;
if (blockHash) {
query = db.getBlock({ hash: blockHash });
} else if (blockHeight && blockHeight > 0) {
query = db.getBlock({ height: blockHeight });
} else {
query = db.getCurrentBlock();
}
const blockQuery = await query;
if (!blockQuery.found) {
return { found: false };
}
const dbBlock = blockQuery.result;
let blockTxs = {} as FoundOrNot<RosettaTransaction[]>;
blockTxs.found = false;
if (fetchTransactions) {
blockTxs = await getRosettaBlockTransactionsFromDataStore({
blockHash: dbBlock.block_hash,
indexBlockHash: dbBlock.index_block_hash,
db,
chainId,
});
}
const parentBlockHash = dbBlock.parent_block_hash;
let parent_block_identifier: RosettaParentBlockIdentifier;
if (dbBlock.block_height <= 1) {
// case for genesis block
parent_block_identifier = {
index: dbBlock.block_height,
hash: dbBlock.block_hash,
};
} else {
const parentBlockQuery = await db.getBlock({ hash: parentBlockHash });
if (parentBlockQuery.found) {
const parentBlock = parentBlockQuery.result;
parent_block_identifier = {
index: parentBlock.block_height,
hash: parentBlock.block_hash,
};
return await db.sqlTransaction(async sql => {
let query;
if (blockHash) {
query = db.getBlock({ hash: blockHash });
} else if (blockHeight && blockHeight > 0) {
query = db.getBlock({ height: blockHeight });
} else {
query = db.getCurrentBlock();
}
const blockQuery = await query;
if (!blockQuery.found) {
return { found: false };
}
}
const dbBlock = blockQuery.result;
let blockTxs = {} as FoundOrNot<RosettaTransaction[]>;
blockTxs.found = false;
if (fetchTransactions) {
blockTxs = await getRosettaBlockTransactionsFromDataStore({
blockHash: dbBlock.block_hash,
indexBlockHash: dbBlock.index_block_hash,
db,
chainId,
});
}
const apiBlock: RosettaBlock = {
block_identifier: { index: dbBlock.block_height, hash: dbBlock.block_hash },
parent_block_identifier,
timestamp: dbBlock.burn_block_time * 1000,
transactions: blockTxs.found ? blockTxs.result : [],
};
return { found: true, result: apiBlock };
const parentBlockHash = dbBlock.parent_block_hash;
let parent_block_identifier: RosettaParentBlockIdentifier;
if (dbBlock.block_height <= 1) {
// case for genesis block
parent_block_identifier = {
index: dbBlock.block_height,
hash: dbBlock.block_hash,
};
} else {
const parentBlockQuery = await db.getBlock({ hash: parentBlockHash });
if (parentBlockQuery.found) {
const parentBlock = parentBlockQuery.result;
parent_block_identifier = {
index: parentBlock.block_height,
hash: parentBlock.block_hash,
};
} else {
return { found: false };
}
}
const apiBlock: RosettaBlock = {
block_identifier: { index: dbBlock.block_height, hash: dbBlock.block_hash },
parent_block_identifier,
timestamp: dbBlock.burn_block_time * 1000,
transactions: blockTxs.found ? blockTxs.result : [],
};
return { found: true, result: apiBlock };
});
}
export async function getUnanchoredTxsFromDataStore(db: PgStore): Promise<Transaction[]> {
@@ -408,7 +410,10 @@ export async function getMicroblocksFromDataStore(args: {
limit: number;
offset: number;
}): Promise<{ total: number; result: Microblock[] }> {
const query = await args.db.getMicroblocks({ limit: args.limit, offset: args.offset });
const query = await args.db.getMicroblocks({
limit: args.limit,
offset: args.offset,
});
const result = query.result.map(r => parseDbMicroblock(r.microblock, r.txs));
return {
total: query.total,
@@ -417,7 +422,10 @@ export async function getMicroblocksFromDataStore(args: {
}
export async function getBlocksWithMetadata(args: { limit: number; offset: number; db: PgStore }) {
const blocks = await args.db.getBlocksWithMetadata({ limit: args.limit, offset: args.offset });
const blocks = await args.db.getBlocksWithMetadata({
limit: args.limit,
offset: args.offset,
});
const results = blocks.results.map(block =>
parseDbBlock(
block.block,
@@ -499,36 +507,38 @@ async function parseRosettaTxDetail(opts: {
unlockingEvents: StxUnlockEvent[];
chainId: ChainID;
}): Promise<RosettaTransaction> {
let events: DbEvent[] = [];
if (opts.block_height > 1) {
// only return events of blocks at height greater than 1
const eventsQuery = await opts.db.getTxEvents({
txId: opts.tx.tx_id,
indexBlockHash: opts.indexBlockHash,
limit: 5000,
offset: 0,
});
events = eventsQuery.results;
}
const operations = await getOperations(
opts.tx,
opts.db,
opts.chainId,
opts.minerRewards,
events,
opts.unlockingEvents
);
const txMemo = parseTransactionMemo(opts.tx);
const rosettaTx: RosettaTransaction = {
transaction_identifier: { hash: opts.tx.tx_id },
operations: operations,
};
if (txMemo) {
rosettaTx.metadata = {
memo: txMemo,
return await opts.db.sqlTransaction(async sql => {
let events: DbEvent[] = [];
if (opts.block_height > 1) {
// only return events of blocks at height greater than 1
const eventsQuery = await opts.db.getTxEvents({
txId: opts.tx.tx_id,
indexBlockHash: opts.indexBlockHash,
limit: 5000,
offset: 0,
});
events = eventsQuery.results;
}
const operations = await getOperations(
opts.tx,
opts.db,
opts.chainId,
opts.minerRewards,
events,
opts.unlockingEvents
);
const txMemo = parseTransactionMemo(opts.tx);
const rosettaTx: RosettaTransaction = {
transaction_identifier: { hash: opts.tx.tx_id },
operations: operations,
};
}
return rosettaTx;
if (txMemo) {
rosettaTx.metadata = {
memo: txMemo,
};
}
return rosettaTx;
});
}
async function getRosettaBlockTxFromDataStore(opts: {
@@ -537,26 +547,28 @@ async function getRosettaBlockTxFromDataStore(opts: {
db: PgStore;
chainId: ChainID;
}): Promise<FoundOrNot<RosettaTransaction>> {
let minerRewards: DbMinerReward[] = [],
unlockingEvents: StxUnlockEvent[] = [];
return await opts.db.sqlTransaction(async sql => {
let minerRewards: DbMinerReward[] = [],
unlockingEvents: StxUnlockEvent[] = [];
if (opts.tx.type_id === DbTxTypeId.Coinbase) {
minerRewards = await opts.db.getMinersRewardsAtHeight({
blockHeight: opts.block.block_height,
if (opts.tx.type_id === DbTxTypeId.Coinbase) {
minerRewards = await opts.db.getMinersRewardsAtHeight({
blockHeight: opts.block.block_height,
});
unlockingEvents = await opts.db.getUnlockedAddressesAtBlock(opts.block);
}
const rosettaTx = await parseRosettaTxDetail({
block_height: opts.block.block_height,
indexBlockHash: opts.tx.index_block_hash,
tx: opts.tx,
db: opts.db,
minerRewards,
unlockingEvents,
chainId: opts.chainId,
});
unlockingEvents = await opts.db.getUnlockedAddressesAtBlock(opts.block);
}
const rosettaTx = await parseRosettaTxDetail({
block_height: opts.block.block_height,
indexBlockHash: opts.tx.index_block_hash,
tx: opts.tx,
db: opts.db,
chainId: opts.chainId,
minerRewards,
unlockingEvents,
return { found: true, result: rosettaTx };
});
return { found: true, result: rosettaTx };
}
async function getRosettaBlockTransactionsFromDataStore(opts: {
@@ -565,38 +577,40 @@ async function getRosettaBlockTransactionsFromDataStore(opts: {
db: PgStore;
chainId: ChainID;
}): Promise<FoundOrNot<RosettaTransaction[]>> {
const blockQuery = await opts.db.getBlock({ hash: opts.blockHash });
if (!blockQuery.found) {
return { found: false };
}
return await opts.db.sqlTransaction(async sql => {
const blockQuery = await opts.db.getBlock({ hash: opts.blockHash });
if (!blockQuery.found) {
return { found: false };
}
const txsQuery = await opts.db.getBlockTxsRows(opts.blockHash);
const minerRewards = await opts.db.getMinersRewardsAtHeight({
blockHeight: blockQuery.result.block_height,
});
if (!txsQuery.found) {
return { found: false };
}
const unlockingEvents = await opts.db.getUnlockedAddressesAtBlock(blockQuery.result);
const transactions: RosettaTransaction[] = [];
for (const tx of txsQuery.result) {
const rosettaTx = await parseRosettaTxDetail({
block_height: blockQuery.result.block_height,
indexBlockHash: opts.indexBlockHash,
tx,
db: opts.db,
chainId: opts.chainId,
minerRewards,
unlockingEvents,
const txsQuery = await opts.db.getBlockTxsRows(opts.blockHash);
const minerRewards = await opts.db.getMinersRewardsAtHeight({
blockHeight: blockQuery.result.block_height,
});
transactions.push(rosettaTx);
}
return { found: true, result: transactions };
if (!txsQuery.found) {
return { found: false };
}
const unlockingEvents = await opts.db.getUnlockedAddressesAtBlock(blockQuery.result);
const transactions: RosettaTransaction[] = [];
for (const tx of txsQuery.result) {
const rosettaTx = await parseRosettaTxDetail({
block_height: blockQuery.result.block_height,
indexBlockHash: opts.indexBlockHash,
tx,
db: opts.db,
minerRewards,
unlockingEvents,
chainId: opts.chainId,
});
transactions.push(rosettaTx);
}
return { found: true, result: transactions };
});
}
export async function getRosettaTransactionFromDataStore(
@@ -604,32 +618,34 @@ export async function getRosettaTransactionFromDataStore(
db: PgStore,
chainId: ChainID
): Promise<FoundOrNot<RosettaTransaction>> {
const txQuery = await db.getTx({ txId, includeUnanchored: false });
if (!txQuery.found) {
return { found: false };
}
return await db.sqlTransaction(async sql => {
const txQuery = await db.getTx({ txId, includeUnanchored: false });
if (!txQuery.found) {
return { found: false };
}
const blockQuery = await db.getBlock({ hash: txQuery.result.block_hash });
if (!blockQuery.found) {
throw new Error(
`Could not find block for tx: ${txId}, block_hash: ${txQuery.result.block_hash}, index_block_hash: ${txQuery.result.index_block_hash}`
);
}
const blockQuery = await db.getBlock({ hash: txQuery.result.block_hash });
if (!blockQuery.found) {
throw new Error(
`Could not find block for tx: ${txId}, block_hash: ${txQuery.result.block_hash}, index_block_hash: ${txQuery.result.index_block_hash}`
);
}
const rosettaTx = await getRosettaBlockTxFromDataStore({
tx: txQuery.result,
block: blockQuery.result,
db,
chainId,
const rosettaTx = await getRosettaBlockTxFromDataStore({
tx: txQuery.result,
block: blockQuery.result,
db,
chainId,
});
if (!rosettaTx.found) {
throw new Error(
`Rosetta block missing operations for tx: ${txId}, block_hash: ${txQuery.result.block_hash}, index_block_hash: ${txQuery.result.index_block_hash}`
);
}
return rosettaTx;
});
if (!rosettaTx.found) {
throw new Error(
`Rosetta block missing operations for tx: ${txId}, block_hash: ${txQuery.result.block_hash}, index_block_hash: ${txQuery.result.index_block_hash}`
);
}
return rosettaTx;
}
interface GetTxArgs {
@@ -879,228 +895,244 @@ async function getTxsFromDataStore(
db: PgStore,
args: GetTxsArgs | GetTxsWithEventsArgs
): Promise<Transaction[]> {
// fetching all requested transactions from db
const txQuery = await db.getTxListDetails({
txIds: args.txIds,
includeUnanchored: args.includeUnanchored,
return await db.sqlTransaction(async sql => {
// fetching all requested transactions from db
const txQuery = await db.getTxListDetails({
txIds: args.txIds,
includeUnanchored: args.includeUnanchored,
});
// returning empty array if no transaction was found
if (txQuery.length === 0) {
return [];
}
// parsing txQuery
const parsedTxs = txQuery.map(tx => parseDbTx(tx));
    // in case transaction events are requested
if ('eventLimit' in args) {
const txIdsAndIndexHash = txQuery.map(tx => {
return {
txId: tx.tx_id,
indexBlockHash: tx.index_block_hash,
};
});
const txListEvents = await db.getTxListEvents({
txs: txIdsAndIndexHash,
limit: args.eventLimit,
offset: args.eventOffset,
});
// this will insert all events in a single parsedTransaction. Only specific ones are to be added.
const txsWithEvents: Transaction[] = parsedTxs.map(ptx => {
return {
...ptx,
events: txListEvents.results
.filter(event => event.tx_id === ptx.tx_id)
.map(event => parseDbEvent(event)),
};
});
return txsWithEvents;
} else {
return parsedTxs;
}
});
// returning empty array if no transaction was found
if (txQuery.length === 0) {
return [];
}
// parsing txQuery
const parsedTxs = txQuery.map(tx => parseDbTx(tx));
    // in case transaction events are requested
if ('eventLimit' in args) {
const txIdsAndIndexHash = txQuery.map(tx => {
return {
txId: tx.tx_id,
indexBlockHash: tx.index_block_hash,
};
});
const txListEvents = await db.getTxListEvents({
txs: txIdsAndIndexHash,
limit: args.eventLimit,
offset: args.eventOffset,
});
// this will insert all events in a single parsedTransaction. Only specific ones are to be added.
const txsWithEvents: Transaction[] = parsedTxs.map(ptx => {
return {
...ptx,
events: txListEvents.results
.filter(event => event.tx_id === ptx.tx_id)
.map(event => parseDbEvent(event)),
};
});
return txsWithEvents;
} else {
return parsedTxs;
}
}
export async function getTxFromDataStore(
db: PgStore,
args: GetTxArgs | GetTxWithEventsArgs | GetTxFromDbTxArgs
): Promise<FoundOrNot<Transaction>> {
let dbTx: DbTx;
if ('dbTx' in args) {
dbTx = args.dbTx;
} else {
const txQuery = await db.getTx({ txId: args.txId, includeUnanchored: args.includeUnanchored });
if (!txQuery.found) {
return { found: false };
return await db.sqlTransaction(async sql => {
let dbTx: DbTx;
if ('dbTx' in args) {
dbTx = args.dbTx;
} else {
const txQuery = await db.getTx({
txId: args.txId,
includeUnanchored: args.includeUnanchored,
});
if (!txQuery.found) {
return { found: false };
}
dbTx = txQuery.result;
}
dbTx = txQuery.result;
}
const parsedTx = parseDbTx(dbTx);
const parsedTx = parseDbTx(dbTx);
// If tx events are requested
if ('eventLimit' in args) {
const eventsQuery = await db.getTxEvents({
txId: args.txId,
indexBlockHash: dbTx.index_block_hash,
limit: args.eventLimit,
offset: args.eventOffset,
});
const txWithEvents: Transaction = {
...parsedTx,
events: eventsQuery.results.map(event => parseDbEvent(event)),
};
return { found: true, result: txWithEvents };
} else {
return {
found: true,
result: parsedTx,
};
}
// If tx events are requested
if ('eventLimit' in args) {
const eventsQuery = await db.getTxEvents({
txId: args.txId,
indexBlockHash: dbTx.index_block_hash,
limit: args.eventLimit,
offset: args.eventOffset,
});
const txWithEvents: Transaction = {
...parsedTx,
events: eventsQuery.results.map(event => parseDbEvent(event)),
};
return { found: true, result: txWithEvents };
} else {
return {
found: true,
result: parsedTx,
};
}
});
}
export async function searchTxs(
db: PgStore,
args: GetTxsArgs | GetTxsWithEventsArgs
): Promise<TransactionList> {
const minedTxs = await getTxsFromDataStore(db, args);
return await db.sqlTransaction(async sql => {
const minedTxs = await getTxsFromDataStore(db, args);
const foundTransactions: TransactionFound[] = [];
const mempoolTxs: string[] = [];
minedTxs.forEach(tx => {
// filtering out mined transactions in canonical chain
if (tx.canonical && tx.microblock_canonical) {
foundTransactions.push({ found: true, result: tx });
}
    // filtering out non-canonical transactions to look up in the mempool table
if (!tx.canonical && !tx.microblock_canonical) {
mempoolTxs.push(tx.tx_id);
}
});
// filtering out tx_ids that were not mined / found
const notMinedTransactions: string[] = args.txIds.filter(
txId => !minedTxs.find(minedTx => txId === minedTx.tx_id)
);
// finding transactions that are not mined and are not canonical in mempool
mempoolTxs.push(...notMinedTransactions);
const mempoolTxsQuery = await getMempoolTxsFromDataStore(db, {
txIds: mempoolTxs,
includeUnanchored: args.includeUnanchored,
});
    // merging found mempool transactions into the found transactions object
foundTransactions.push(
...mempoolTxsQuery.map((mtx: Transaction | MempoolTransaction) => {
return { found: true, result: mtx } as TransactionFound;
})
);
// filtering out transactions that were not found anywhere
const notFoundTransactions: TransactionNotFound[] = args.txIds
.filter(txId => foundTransactions.findIndex(ftx => ftx.result?.tx_id === txId) < 0)
.map(txId => {
return { found: false, result: { tx_id: txId } };
const foundTransactions: TransactionFound[] = [];
const mempoolTxs: string[] = [];
minedTxs.forEach(tx => {
// filtering out mined transactions in canonical chain
if (tx.canonical && tx.microblock_canonical) {
foundTransactions.push({ found: true, result: tx });
}
      // filtering out non-canonical transactions to look up in the mempool table
if (!tx.canonical && !tx.microblock_canonical) {
mempoolTxs.push(tx.tx_id);
}
});
// generating response
const resp = [...foundTransactions, ...notFoundTransactions].reduce(
(map: TransactionList, obj) => {
if (obj.result) {
map[obj.result.tx_id] = obj;
}
return map;
},
{}
);
return resp;
// filtering out tx_ids that were not mined / found
const notMinedTransactions: string[] = args.txIds.filter(
txId => !minedTxs.find(minedTx => txId === minedTx.tx_id)
);
// finding transactions that are not mined and are not canonical in mempool
mempoolTxs.push(...notMinedTransactions);
const mempoolTxsQuery = await getMempoolTxsFromDataStore(db, {
txIds: mempoolTxs,
includeUnanchored: args.includeUnanchored,
});
    // merging found mempool transactions into the found transactions object
foundTransactions.push(
...mempoolTxsQuery.map((mtx: Transaction | MempoolTransaction) => {
return { found: true, result: mtx } as TransactionFound;
})
);
// filtering out transactions that were not found anywhere
const notFoundTransactions: TransactionNotFound[] = args.txIds
.filter(txId => foundTransactions.findIndex(ftx => ftx.result?.tx_id === txId) < 0)
.map(txId => {
return { found: false, result: { tx_id: txId } };
});
// generating response
const resp = [...foundTransactions, ...notFoundTransactions].reduce(
(map: TransactionList, obj) => {
if (obj.result) {
map[obj.result.tx_id] = obj;
}
return map;
},
{}
);
return resp;
});
}
export async function searchTx(
db: PgStore,
args: GetTxArgs | GetTxWithEventsArgs
): Promise<FoundOrNot<Transaction | MempoolTransaction>> {
// First, check the happy path: the tx is mined and in the canonical chain.
const minedTxs = await getTxsFromDataStore(db, { ...args, txIds: [args.txId] });
const minedTx = minedTxs[0] ?? undefined;
if (minedTx && minedTx.canonical && minedTx.microblock_canonical) {
return { found: true, result: minedTx };
} else {
// Otherwise, if not mined or not canonical, check in the mempool.
const mempoolTxQuery = await getMempoolTxsFromDataStore(db, { ...args, txIds: [args.txId] });
const mempoolTx = mempoolTxQuery[0] ?? undefined;
if (mempoolTx) {
return { found: true, result: mempoolTx };
}
// Fallback for a situation where the tx was only mined in a non-canonical chain, but somehow not in the mempool table.
else if (minedTx) {
logger.warn(`Tx only exists in a non-canonical chain, missing from mempool: ${args.txId}`);
return await db.sqlTransaction(async sql => {
// First, check the happy path: the tx is mined and in the canonical chain.
const minedTxs = await getTxsFromDataStore(db, { ...args, txIds: [args.txId] });
const minedTx = minedTxs[0] ?? undefined;
if (minedTx && minedTx.canonical && minedTx.microblock_canonical) {
return { found: true, result: minedTx };
} else {
// Otherwise, if not mined or not canonical, check in the mempool.
const mempoolTxQuery = await getMempoolTxsFromDataStore(db, {
...args,
txIds: [args.txId],
});
const mempoolTx = mempoolTxQuery[0] ?? undefined;
if (mempoolTx) {
return { found: true, result: mempoolTx };
}
// Fallback for a situation where the tx was only mined in a non-canonical chain, but somehow not in the mempool table.
else if (minedTx) {
logger.warn(`Tx only exists in a non-canonical chain, missing from mempool: ${args.txId}`);
return { found: true, result: minedTx };
}
// Tx not found in db.
else {
return { found: false };
}
}
// Tx not found in db.
else {
return { found: false };
}
}
});
}
export async function searchHashWithMetadata(
hash: string,
db: PgStore
): Promise<FoundOrNot<DbSearchResultWithMetadata>> {
// checking for tx
const txQuery = await db.getTxListDetails({ txIds: [hash], includeUnanchored: true });
if (txQuery.length > 0) {
// tx found
const tx = txQuery[0];
return {
found: true,
result: {
entity_type: 'tx_id',
entity_id: tx.tx_id,
entity_data: tx,
},
};
}
// checking for mempool tx
const mempoolTxQuery = await db.getMempoolTxs({
txIds: [hash],
includeUnanchored: true,
includePruned: true,
return await db.sqlTransaction(async sql => {
// checking for tx
const txQuery = await db.getTxListDetails({ txIds: [hash], includeUnanchored: true });
if (txQuery.length > 0) {
// tx found
const tx = txQuery[0];
return {
found: true,
result: {
entity_type: 'tx_id',
entity_id: tx.tx_id,
entity_data: tx,
},
};
}
// checking for mempool tx
const mempoolTxQuery = await db.getMempoolTxs({
txIds: [hash],
includeUnanchored: true,
includePruned: true,
});
if (mempoolTxQuery.length > 0) {
// mempool tx found
const mempoolTx = mempoolTxQuery[0];
return {
found: true,
result: {
entity_type: 'mempool_tx_id',
entity_id: mempoolTx.tx_id,
entity_data: mempoolTx,
},
};
}
// checking for block
const blockQuery = await db.getBlockWithMetadata({ hash }, { txs: true, microblocks: true });
if (blockQuery.found) {
// block found
const result = parseDbBlock(
blockQuery.result.block,
blockQuery.result.txs.map(tx => tx.tx_id),
blockQuery.result.microblocks.accepted.map(mb => mb.microblock_hash),
blockQuery.result.microblocks.streamed.map(mb => mb.microblock_hash),
blockQuery.result.microblock_tx_count
);
return {
found: true,
result: {
entity_type: 'block_hash',
entity_id: result.hash,
entity_data: result,
},
};
}
// found nothing
return { found: false };
});
if (mempoolTxQuery.length > 0) {
// mempool tx found
const mempoolTx = mempoolTxQuery[0];
return {
found: true,
result: {
entity_type: 'mempool_tx_id',
entity_id: mempoolTx.tx_id,
entity_data: mempoolTx,
},
};
}
// checking for block
const blockQuery = await db.getBlockWithMetadata({ hash }, { txs: true, microblocks: true });
if (blockQuery.found) {
// block found
const result = parseDbBlock(
blockQuery.result.block,
blockQuery.result.txs.map(tx => tx.tx_id),
blockQuery.result.microblocks.accepted.map(mb => mb.microblock_hash),
blockQuery.result.microblocks.streamed.map(mb => mb.microblock_hash),
blockQuery.result.microblock_tx_count
);
return {
found: true,
result: {
entity_type: 'block_hash',
entity_id: result.hash,
entity_data: result,
},
};
}
// found nothing
return { found: false };
}

View File

@@ -10,7 +10,6 @@ import {
validatePrincipal,
} from '../query-helpers';
import {
bufferToHexPrefixString,
formatMapToObject,
getSendManyContract,
has0xPrefix,
@@ -51,6 +50,7 @@ import {
setETagCacheHeaders,
} from '../controllers/cache-controller';
import { PgStore } from '../../datastore/pg-store';
import { PgSqlClient } from '../../datastore/connection';
const MAX_TX_PER_REQUEST = 50;
const MAX_ASSETS_PER_REQUEST = 50;
@@ -126,27 +126,28 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
validatePrincipal(stxAddress);
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
// Get balance info for STX token
const stxBalanceResult = await db.getStxBalanceAtBlock(stxAddress, blockHeight);
const tokenOfferingLocked = await db.getTokenOfferingLocked(stxAddress, blockHeight);
const result: AddressStxBalanceResponse = {
balance: stxBalanceResult.balance.toString(),
total_sent: stxBalanceResult.totalSent.toString(),
total_received: stxBalanceResult.totalReceived.toString(),
total_fees_sent: stxBalanceResult.totalFeesSent.toString(),
total_miner_rewards_received: stxBalanceResult.totalMinerRewardsReceived.toString(),
lock_tx_id: stxBalanceResult.lockTxId,
locked: stxBalanceResult.locked.toString(),
lock_height: stxBalanceResult.lockHeight,
burnchain_lock_height: stxBalanceResult.burnchainLockHeight,
burnchain_unlock_height: stxBalanceResult.burnchainUnlockHeight,
};
if (tokenOfferingLocked.found) {
result.token_offering_locked = tokenOfferingLocked.result;
}
const result = await db.sqlTransaction(async sql => {
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
// Get balance info for STX token
const stxBalanceResult = await db.getStxBalanceAtBlock(stxAddress, blockHeight);
const tokenOfferingLocked = await db.getTokenOfferingLocked(stxAddress, blockHeight);
const result: AddressStxBalanceResponse = {
balance: stxBalanceResult.balance.toString(),
total_sent: stxBalanceResult.totalSent.toString(),
total_received: stxBalanceResult.totalReceived.toString(),
total_fees_sent: stxBalanceResult.totalFeesSent.toString(),
total_miner_rewards_received: stxBalanceResult.totalMinerRewardsReceived.toString(),
lock_tx_id: stxBalanceResult.lockTxId,
locked: stxBalanceResult.locked.toString(),
lock_height: stxBalanceResult.lockHeight,
burnchain_lock_height: stxBalanceResult.burnchainLockHeight,
burnchain_unlock_height: stxBalanceResult.burnchainUnlockHeight,
};
if (tokenOfferingLocked.found) {
result.token_offering_locked = tokenOfferingLocked.result;
}
return result;
});
setETagCacheHeaders(res);
res.json(result);
})
@@ -159,61 +160,63 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
asyncHandler(async (req, res, next) => {
const stxAddress = req.params['stx_address'];
validatePrincipal(stxAddress);
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
// Get balance info for STX token
const stxBalanceResult = await db.getStxBalanceAtBlock(stxAddress, blockHeight);
const tokenOfferingLocked = await db.getTokenOfferingLocked(stxAddress, blockHeight);
const result = await db.sqlTransaction(async sql => {
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
// Get balances for fungible tokens
const ftBalancesResult = await db.getFungibleTokenBalances({
stxAddress,
untilBlock: blockHeight,
});
const ftBalances = formatMapToObject(ftBalancesResult, val => {
return {
balance: val.balance.toString(),
total_sent: val.totalSent.toString(),
total_received: val.totalReceived.toString(),
// Get balance info for STX token
const stxBalanceResult = await db.getStxBalanceAtBlock(stxAddress, blockHeight);
const tokenOfferingLocked = await db.getTokenOfferingLocked(stxAddress, blockHeight);
// Get balances for fungible tokens
const ftBalancesResult = await db.getFungibleTokenBalances({
stxAddress,
untilBlock: blockHeight,
});
const ftBalances = formatMapToObject(ftBalancesResult, val => {
return {
balance: val.balance.toString(),
total_sent: val.totalSent.toString(),
total_received: val.totalReceived.toString(),
};
});
// Get counts for non-fungible tokens
const nftBalancesResult = await db.getNonFungibleTokenCounts({
stxAddress,
untilBlock: blockHeight,
});
const nftBalances = formatMapToObject(nftBalancesResult, val => {
return {
count: val.count.toString(),
total_sent: val.totalSent.toString(),
total_received: val.totalReceived.toString(),
};
});
const result: AddressBalanceResponse = {
stx: {
balance: stxBalanceResult.balance.toString(),
total_sent: stxBalanceResult.totalSent.toString(),
total_received: stxBalanceResult.totalReceived.toString(),
total_fees_sent: stxBalanceResult.totalFeesSent.toString(),
total_miner_rewards_received: stxBalanceResult.totalMinerRewardsReceived.toString(),
lock_tx_id: stxBalanceResult.lockTxId,
locked: stxBalanceResult.locked.toString(),
lock_height: stxBalanceResult.lockHeight,
burnchain_lock_height: stxBalanceResult.burnchainLockHeight,
burnchain_unlock_height: stxBalanceResult.burnchainUnlockHeight,
},
fungible_tokens: ftBalances,
non_fungible_tokens: nftBalances,
};
if (tokenOfferingLocked.found) {
result.token_offering_locked = tokenOfferingLocked.result;
}
return result;
});
// Get counts for non-fungible tokens
const nftBalancesResult = await db.getNonFungibleTokenCounts({
stxAddress,
untilBlock: blockHeight,
});
const nftBalances = formatMapToObject(nftBalancesResult, val => {
return {
count: val.count.toString(),
total_sent: val.totalSent.toString(),
total_received: val.totalReceived.toString(),
};
});
const result: AddressBalanceResponse = {
stx: {
balance: stxBalanceResult.balance.toString(),
total_sent: stxBalanceResult.totalSent.toString(),
total_received: stxBalanceResult.totalReceived.toString(),
total_fees_sent: stxBalanceResult.totalFeesSent.toString(),
total_miner_rewards_received: stxBalanceResult.totalMinerRewardsReceived.toString(),
lock_tx_id: stxBalanceResult.lockTxId,
locked: stxBalanceResult.locked.toString(),
lock_height: stxBalanceResult.lockHeight,
burnchain_lock_height: stxBalanceResult.burnchainLockHeight,
burnchain_unlock_height: stxBalanceResult.burnchainUnlockHeight,
},
fungible_tokens: ftBalances,
non_fungible_tokens: nftBalances,
};
if (tokenOfferingLocked.found) {
result.token_offering_locked = tokenOfferingLocked.result;
}
setETagCacheHeaders(res);
res.json(result);
})
@@ -229,35 +232,38 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
asyncHandler(async (req, res, next) => {
const principal = req.params['principal'];
validatePrincipal(principal);
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockParams = getBlockParams(req, res, next);
let atSingleBlock = false;
let blockHeight = 0;
if (blockParams.blockHeight) {
if (untilBlock) {
throw new InvalidRequestError(
`can't handle until_block and block_height in the same request`,
InvalidRequestErrorType.invalid_param
);
}
atSingleBlock = true;
blockHeight = blockParams.blockHeight;
} else {
blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
}
const limit = parseTxQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const { results: txResults, total } = await db.getAddressTxs({
stxAddress: principal,
limit,
offset,
blockHeight,
atSingleBlock,
const response = await db.sqlTransaction(async sql => {
const blockParams = getBlockParams(req, res, next);
let atSingleBlock = false;
let blockHeight = 0;
if (blockParams.blockHeight) {
if (untilBlock) {
throw new InvalidRequestError(
`can't handle until_block and block_height in the same request`,
InvalidRequestErrorType.invalid_param
);
}
atSingleBlock = true;
blockHeight = blockParams.blockHeight;
} else {
blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
}
const { results: txResults, total } = await db.getAddressTxs({
stxAddress: principal,
limit,
offset,
blockHeight,
atSingleBlock,
});
const results = txResults.map(dbTx => parseDbTx(dbTx));
const response: TransactionResults = { limit, offset, total, results };
return response;
});
const results = txResults.map(dbTx => parseDbTx(dbTx));
const response: TransactionResults = { limit, offset, total, results };
setETagCacheHeaders(res);
res.json(response);
})
@@ -273,29 +279,36 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
if (!has0xPrefix(tx_id)) {
tx_id = '0x' + tx_id;
}
const results = await db.getInformationTxsWithStxTransfers({ stxAddress, tx_id });
if (results && results.tx) {
const txQuery = await getTxFromDataStore(db, {
txId: results.tx.tx_id,
dbTx: results.tx,
includeUnanchored: false,
});
if (!txQuery.found) {
throw new Error('unexpected tx not found -- fix tx enumeration query');
const result = await db.sqlTransaction(async sql => {
const results = await db.getInformationTxsWithStxTransfers({ stxAddress, tx_id });
if (results && results.tx) {
const txQuery = await getTxFromDataStore(db, {
txId: results.tx.tx_id,
dbTx: results.tx,
includeUnanchored: false,
});
if (!txQuery.found) {
throw new Error('unexpected tx not found -- fix tx enumeration query');
}
const result: AddressTransactionWithTransfers = {
tx: txQuery.result,
stx_sent: results.stx_sent.toString(),
stx_received: results.stx_received.toString(),
stx_transfers: results.stx_transfers.map(transfer => ({
amount: transfer.amount.toString(),
sender: transfer.sender,
recipient: transfer.recipient,
})),
};
return result;
}
const result: AddressTransactionWithTransfers = {
tx: txQuery.result,
stx_sent: results.stx_sent.toString(),
stx_received: results.stx_received.toString(),
stx_transfers: results.stx_transfers.map(transfer => ({
amount: transfer.amount.toString(),
sender: transfer.sender,
recipient: transfer.recipient,
})),
};
});
if (result) {
setETagCacheHeaders(res);
res.json(result);
} else res.status(404).json({ error: 'No matching transaction found' });
} else {
res.status(404).json({ error: 'No matching transaction found' });
}
})
);
@@ -305,81 +318,83 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
asyncHandler(async (req, res, next) => {
const stxAddress = req.params['stx_address'];
validatePrincipal(stxAddress);
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockParams = getBlockParams(req, res, next);
let atSingleBlock = false;
let blockHeight = 0;
if (blockParams.blockHeight) {
if (untilBlock) {
throw new InvalidRequestError(
`can't handle until_block and block_height in the same request`,
InvalidRequestErrorType.invalid_param
);
}
atSingleBlock = true;
blockHeight = blockParams.blockHeight;
} else {
blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
}
const limit = parseTxQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const { results: txResults, total } = await db.getAddressTxsWithAssetTransfers({
stxAddress: stxAddress,
limit,
offset,
blockHeight,
atSingleBlock,
});
// TODO: use getBlockWithMetadata or similar to avoid transaction integrity issues from lazy resolving block tx data (primarily the contract-call ABI data)
const results = await Bluebird.mapSeries(txResults, async entry => {
const txQuery = await getTxFromDataStore(db, {
txId: entry.tx.tx_id,
dbTx: entry.tx,
includeUnanchored: blockParams.includeUnanchored ?? false,
});
if (!txQuery.found) {
throw new Error('unexpected tx not found -- fix tx enumeration query');
const response = await db.sqlTransaction(async sql => {
const blockParams = getBlockParams(req, res, next);
let atSingleBlock = false;
let blockHeight = 0;
if (blockParams.blockHeight) {
if (untilBlock) {
throw new InvalidRequestError(
`can't handle until_block and block_height in the same request`,
InvalidRequestErrorType.invalid_param
);
}
atSingleBlock = true;
blockHeight = blockParams.blockHeight;
} else {
blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
}
const result: AddressTransactionWithTransfers = {
tx: txQuery.result,
stx_sent: entry.stx_sent.toString(),
stx_received: entry.stx_received.toString(),
stx_transfers: entry.stx_transfers.map(transfer => ({
amount: transfer.amount.toString(),
sender: transfer.sender,
recipient: transfer.recipient,
})),
ft_transfers: entry.ft_transfers.map(transfer => ({
asset_identifier: transfer.asset_identifier,
amount: transfer.amount.toString(),
sender: transfer.sender,
recipient: transfer.recipient,
})),
nft_transfers: entry.nft_transfers.map(transfer => {
const parsedClarityValue = decodeClarityValueToRepr(transfer.value);
const nftTransfer = {
asset_identifier: transfer.asset_identifier,
value: {
hex: transfer.value,
repr: parsedClarityValue,
},
const limit = parseTxQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const { results: txResults, total } = await db.getAddressTxsWithAssetTransfers({
stxAddress: stxAddress,
limit,
offset,
blockHeight,
atSingleBlock,
});
const results = await Bluebird.mapSeries(txResults, async entry => {
const txQuery = await getTxFromDataStore(db, {
txId: entry.tx.tx_id,
dbTx: entry.tx,
includeUnanchored: blockParams.includeUnanchored ?? false,
});
if (!txQuery.found) {
throw new Error('unexpected tx not found -- fix tx enumeration query');
}
const result: AddressTransactionWithTransfers = {
tx: txQuery.result,
stx_sent: entry.stx_sent.toString(),
stx_received: entry.stx_received.toString(),
stx_transfers: entry.stx_transfers.map(transfer => ({
amount: transfer.amount.toString(),
sender: transfer.sender,
recipient: transfer.recipient,
};
return nftTransfer;
}),
};
return result;
});
})),
ft_transfers: entry.ft_transfers.map(transfer => ({
asset_identifier: transfer.asset_identifier,
amount: transfer.amount.toString(),
sender: transfer.sender,
recipient: transfer.recipient,
})),
nft_transfers: entry.nft_transfers.map(transfer => {
const parsedClarityValue = decodeClarityValueToRepr(transfer.value);
const nftTransfer = {
asset_identifier: transfer.asset_identifier,
value: {
hex: transfer.value,
repr: parsedClarityValue,
},
sender: transfer.sender,
recipient: transfer.recipient,
};
return nftTransfer;
}),
};
return result;
});
const response: AddressTransactionsWithTransfersListResponse = {
limit,
offset,
total,
results,
};
const response: AddressTransactionsWithTransfersListResponse = {
limit,
offset,
total,
results,
};
return response;
});
setETagCacheHeaders(res);
res.json(response);
})
@@ -393,18 +408,21 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
const stxAddress = req.params['stx_address'];
validatePrincipal(stxAddress);
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
const limit = parseAssetsQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const { results: assetEvents, total } = await db.getAddressAssetEvents({
stxAddress,
limit,
offset,
blockHeight,
const response = await db.sqlTransaction(async sql => {
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
const { results: assetEvents, total } = await db.getAddressAssetEvents({
stxAddress,
limit,
offset,
blockHeight,
});
const results = assetEvents.map(event => parseDbEvent(event));
const response: AddressAssetEvents = { limit, offset, total, results };
return response;
});
const results = assetEvents.map(event => parseDbEvent(event));
const response: AddressAssetEvents = { limit, offset, total, results };
setETagCacheHeaders(res);
res.json(response);
})
@@ -425,48 +443,51 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
}
validatePrincipal(stxAddress);
let atSingleBlock = false;
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockParams = getBlockParams(req, res, next);
let blockHeight = 0;
if (blockParams.blockHeight) {
if (untilBlock) {
throw new InvalidRequestError(
`can't handle until_block and block_height in the same request`,
InvalidRequestErrorType.invalid_param
);
const response = await db.sqlTransaction(async sql => {
let atSingleBlock = false;
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockParams = getBlockParams(req, res, next);
let blockHeight = 0;
if (blockParams.blockHeight) {
if (untilBlock) {
throw new InvalidRequestError(
`can't handle until_block and block_height in the same request`,
InvalidRequestErrorType.invalid_param
);
}
atSingleBlock = true;
blockHeight = blockParams.blockHeight;
} else {
blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
}
atSingleBlock = true;
blockHeight = blockParams.blockHeight;
} else {
blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
}
const limit = parseStxInboundLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const { results, total } = await db.getInboundTransfers({
stxAddress,
limit,
offset,
sendManyContractId,
blockHeight,
atSingleBlock,
const limit = parseStxInboundLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const { results, total } = await db.getInboundTransfers({
stxAddress,
limit,
offset,
sendManyContractId,
blockHeight,
atSingleBlock,
});
const transfers: InboundStxTransfer[] = results.map(r => ({
sender: r.sender,
amount: r.amount.toString(),
memo: r.memo,
block_height: r.block_height,
tx_id: r.tx_id,
transfer_type: r.transfer_type as InboundStxTransfer['transfer_type'],
tx_index: r.tx_index,
}));
const response: AddressStxInboundListResponse = {
results: transfers,
total: total,
limit,
offset,
};
return response;
});
const transfers: InboundStxTransfer[] = results.map(r => ({
sender: r.sender,
amount: r.amount.toString(),
memo: r.memo,
block_height: r.block_height,
tx_id: r.tx_id,
transfer_type: r.transfer_type as InboundStxTransfer['transfer_type'],
tx_index: r.tx_index,
}));
const response: AddressStxInboundListResponse = {
results: transfers,
total: total,
limit,
offset,
};
setETagCacheHeaders(res);
res.json(response);
} catch (error) {
@@ -486,44 +507,47 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
// get recent asset event associated with address
const stxAddress = req.params['stx_address'];
validatePrincipal(stxAddress);
const untilBlock = parseUntilBlockQuery(req, res, next);
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
const limit = parseAssetsQueryLimit(req.query.limit ?? 20);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const includeUnanchored = isUnanchoredRequest(req, res, next);
const untilBlock = parseUntilBlockQuery(req, res, next);
const response = await db.getAddressNFTEvent({
stxAddress,
limit,
offset,
blockHeight,
includeUnanchored,
});
const nft_events = response.results.map(row => {
const parsedClarityValue = decodeClarityValueToRepr(row.value);
const r: NftEvent = {
sender: row.sender,
recipient: row.recipient,
asset_identifier: row.asset_identifier,
value: {
hex: row.value,
repr: parsedClarityValue,
},
tx_id: row.tx_id,
block_height: row.block_height,
event_index: row.event_index,
asset_event_type: getAssetEventTypeString(row.asset_event_type_id),
tx_index: row.tx_index,
const nftListResponse = await db.sqlTransaction(async sql => {
const blockHeight = await getBlockHeight(untilBlock, req, res, next, db);
const response = await db.getAddressNFTEvent({
stxAddress,
limit,
offset,
blockHeight,
includeUnanchored,
});
const nft_events = response.results.map(row => {
const parsedClarityValue = decodeClarityValueToRepr(row.value);
const r: NftEvent = {
sender: row.sender,
recipient: row.recipient,
asset_identifier: row.asset_identifier,
value: {
hex: row.value,
repr: parsedClarityValue,
},
tx_id: row.tx_id,
block_height: row.block_height,
event_index: row.event_index,
asset_event_type: getAssetEventTypeString(row.asset_event_type_id),
tx_index: row.tx_index,
};
return r;
});
const nftListResponse: AddressNftListResponse = {
nft_events: nft_events,
total: response.total,
limit: limit,
offset: offset,
};
return r;
return nftListResponse;
});
const nftListResponse: AddressNftListResponse = {
nft_events: nft_events,
total: response.total,
limit: limit,
offset: offset,
};
setETagCacheHeaders(res);
res.json(nftListResponse);
})
@@ -614,9 +638,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout
setETagCacheHeaders(res);
res.json(results);
} else {
const nonces = await db.getAddressNonces({
stxAddress,
});
const nonces = await db.getAddressNonces({ stxAddress });
const results: AddressNonces = {
last_executed_tx_nonce: nonces.lastExecutedTxNonce as number,
last_mempool_tx_nonce: nonces.lastMempoolTxNonce as number,

View File

@@ -11,6 +11,14 @@ import {
setETagCacheHeaders,
} from '../../../api/controllers/cache-controller';
class NameRedirectError extends Error {
constructor(message: string) {
super(message);
this.message = message;
this.name = this.constructor.name;
}
}
export function createBnsNamesRouter(db: PgStore, chainId: ChainID): express.Router {
const router = express.Router();
const cacheHandler = getETagCacheHandler(db);
@@ -85,64 +93,77 @@ export function createBnsNamesRouter(db: PgStore, chainId: ChainID): express.Rou
asyncHandler(async (req, res, next) => {
const { name } = req.params;
const includeUnanchored = isUnanchoredRequest(req, res, next);
let nameInfoResponse: BnsGetNameInfoResponse;
// Subdomain case
if (name.split('.').length == 3) {
const subdomainQuery = await db.getSubdomain({ subdomain: name, includeUnanchored });
if (!subdomainQuery.found) {
const namePart = name.split('.').slice(1).join('.');
const resolverResult = await db.getSubdomainResolver({ name: namePart });
if (resolverResult.found) {
if (resolverResult.result === '') {
res.status(404).json({ error: `missing resolver from a malformed zonefile` });
return;
await db
.sqlTransaction(async sql => {
let nameInfoResponse: BnsGetNameInfoResponse;
// Subdomain case
if (name.split('.').length == 3) {
const subdomainQuery = await db.getSubdomain({
subdomain: name,
includeUnanchored,
});
if (!subdomainQuery.found) {
const namePart = name.split('.').slice(1).join('.');
const resolverResult = await db.getSubdomainResolver({ name: namePart });
if (resolverResult.found) {
if (resolverResult.result === '') {
throw { error: `missing resolver from a malformed zonefile` };
}
throw new NameRedirectError(`${resolverResult.result}/v1/names${req.url}`);
}
throw { error: `cannot find subdomain ${name}` };
}
res.redirect(`${resolverResult.result}/v1/names${req.url}`);
return;
const { result } = subdomainQuery;
nameInfoResponse = {
address: result.owner,
blockchain: bnsBlockchain,
last_txid: result.tx_id,
resolver: result.resolver,
status: 'registered_subdomain',
zonefile: result.zonefile,
zonefile_hash: result.zonefile_hash,
};
} else {
const nameQuery = await db.getName({
name,
includeUnanchored: includeUnanchored,
chainId: chainId,
});
if (!nameQuery.found) {
throw { error: `cannot find name ${name}` };
}
const { result } = nameQuery;
nameInfoResponse = {
address: result.address,
blockchain: bnsBlockchain,
expire_block: result.expire_block,
grace_period: result.grace_period,
last_txid: result.tx_id ? result.tx_id : '',
resolver: result.resolver,
status: result.status ? result.status : '',
zonefile: result.zonefile,
zonefile_hash: result.zonefile_hash,
};
}
res.status(404).json({ error: `cannot find subdomain ${name}` });
return;
}
const { result } = subdomainQuery;
nameInfoResponse = {
address: result.owner,
blockchain: bnsBlockchain,
last_txid: result.tx_id,
resolver: result.resolver,
status: 'registered_subdomain',
zonefile: result.zonefile,
zonefile_hash: result.zonefile_hash,
};
} else {
const nameQuery = await db.getName({
name,
includeUnanchored: includeUnanchored,
chainId: chainId,
const response = Object.fromEntries(
Object.entries(nameInfoResponse).filter(([_, v]) => v != null)
);
return response;
})
.then(response => {
setETagCacheHeaders(res);
res.json(response);
})
.catch(error => {
if (error instanceof NameRedirectError) {
res.redirect(error.message);
} else {
res.status(400).json(error);
}
});
if (!nameQuery.found) {
res.status(404).json({ error: `cannot find name ${name}` });
return;
}
const { result } = nameQuery;
nameInfoResponse = {
address: result.address,
blockchain: bnsBlockchain,
expire_block: result.expire_block,
grace_period: result.grace_period,
last_txid: result.tx_id ? result.tx_id : '',
resolver: result.resolver,
status: result.status ? result.status : '',
zonefile: result.zonefile,
zonefile_hash: result.zonefile_hash,
};
}
const response = Object.fromEntries(
Object.entries(nameInfoResponse).filter(([_, v]) => v != null)
);
setETagCacheHeaders(res);
res.json(response);
})
);

View File

@@ -36,22 +36,31 @@ export function createBnsNamespacesRouter(db: PgStore): express.Router {
const { tld } = req.params;
const page = parsePagingQueryInput(req.query.page ?? 0);
const includeUnanchored = isUnanchoredRequest(req, res, next);
const response = await db.getNamespace({ namespace: tld, includeUnanchored });
if (!response.found) {
res.status(404).json(BnsErrors.NoSuchNamespace);
} else {
const { results } = await db.getNamespaceNamesList({
namespace: tld,
page,
includeUnanchored,
});
if (results.length === 0 && req.query.page) {
res.status(400).json(BnsErrors.InvalidPageNumber);
} else {
await db
.sqlTransaction(async sql => {
const response = await db.getNamespace({ namespace: tld, includeUnanchored });
if (!response.found) {
throw BnsErrors.NoSuchNamespace;
} else {
const { results } = await db.getNamespaceNamesList({
namespace: tld,
page,
includeUnanchored,
});
if (results.length === 0 && req.query.page) {
throw BnsErrors.InvalidPageNumber;
} else {
return results;
}
}
})
.then(results => {
setETagCacheHeaders(res);
res.json(results);
}
}
})
.catch(error => {
res.status(400).json(error);
});
})
);

View File

@@ -4,13 +4,11 @@ import { DbBlock } from '../../../datastore/common';
import { PgStore } from '../../../datastore/pg-store';
import { has0xPrefix, FoundOrNot } from '../../../helpers';
import {
NetworkIdentifier,
RosettaAccount,
RosettaBlockIdentifier,
RosettaAccountBalanceResponse,
RosettaSubAccount,
AddressTokenOfferingLocked,
AddressUnlockSchedule,
RosettaAmount,
} from '@stacks/stacks-blockchain-api-types';
import { RosettaErrors, RosettaConstants, RosettaErrorsTypes } from '../../rosetta-constants';
@@ -35,128 +33,132 @@ export function createRosettaAccountRouter(db: PgStore, chainId: ChainID): expre
const accountIdentifier: RosettaAccount = req.body.account_identifier;
const subAccountIdentifier: RosettaSubAccount = req.body.account_identifier.sub_account;
const blockIdentifier: RosettaBlockIdentifier = req.body.block_identifier;
let blockQuery: FoundOrNot<DbBlock>;
let blockHash: string = '0x';
// we need to return the block height/hash in the response, so we
// need to fetch the block first.
if (blockIdentifier === undefined) {
blockQuery = await db.getCurrentBlock();
} else if (blockIdentifier.index > 0) {
blockQuery = await db.getBlock({ height: blockIdentifier.index });
} else if (blockIdentifier.hash !== undefined) {
blockHash = blockIdentifier.hash;
if (!has0xPrefix(blockHash)) {
blockHash = '0x' + blockHash;
}
blockQuery = await db.getBlock({ hash: blockHash });
} else {
res.status(400).json(RosettaErrors[RosettaErrorsTypes.invalidBlockIdentifier]);
return;
}
if (!blockQuery.found) {
res.status(500).json(RosettaErrors[RosettaErrorsTypes.blockNotFound]);
return;
}
const block = blockQuery.result;
if (blockIdentifier?.hash !== undefined && block.block_hash !== blockIdentifier.hash) {
res.status(400).json(RosettaErrors[RosettaErrorsTypes.invalidBlockHash]);
return;
}
const stxBalance = await db.getStxBalanceAtBlock(
accountIdentifier.address,
block.block_height
);
// return spendable balance (liquid) if no sub-account is specified
let balance = (stxBalance.balance - stxBalance.locked).toString();
const accountNonceQuery = await db.getAddressNonceAtBlock({
stxAddress: accountIdentifier.address,
blockIdentifier: { height: block.block_height },
});
const sequenceNumber = accountNonceQuery.found
? accountNonceQuery.result.possibleNextNonce
: 0;
const extra_metadata: any = {};
if (subAccountIdentifier !== undefined) {
switch (subAccountIdentifier.address) {
case RosettaConstants.StackedBalance:
const lockedBalance = stxBalance.locked;
balance = lockedBalance.toString();
break;
case RosettaConstants.SpendableBalance:
const spendableBalance = stxBalance.balance - stxBalance.locked;
balance = spendableBalance.toString();
break;
case RosettaConstants.VestingLockedBalance:
case RosettaConstants.VestingUnlockedBalance:
const stxVesting = await db.getTokenOfferingLocked(
accountIdentifier.address,
block.block_height
);
if (stxVesting.found) {
const vestingInfo = getVestingInfo(stxVesting.result);
balance = vestingInfo[subAccountIdentifier.address].toString();
extra_metadata[RosettaConstants.VestingSchedule] =
vestingInfo[RosettaConstants.VestingSchedule];
} else {
balance = '0';
await db
.sqlTransaction(async sql => {
let blockQuery: FoundOrNot<DbBlock>;
let blockHash: string = '0x';
// we need to return the block height/hash in the response, so we
// need to fetch the block first.
if (blockIdentifier === undefined) {
blockQuery = await db.getCurrentBlock();
} else if (blockIdentifier.index > 0) {
blockQuery = await db.getBlock({ height: blockIdentifier.index });
} else if (blockIdentifier.hash !== undefined) {
blockHash = blockIdentifier.hash;
if (!has0xPrefix(blockHash)) {
blockHash = '0x' + blockHash;
}
break;
default:
res.status(400).json(RosettaErrors[RosettaErrorsTypes.invalidSubAccount]);
return;
}
}
const balances: RosettaAmount[] = [
{
value: balance,
currency: {
symbol: RosettaConstants.symbol,
decimals: RosettaConstants.decimals,
},
metadata: Object.keys(extra_metadata).length > 0 ? extra_metadata : undefined,
},
];
// Add Fungible Token balances.
if (isFtMetadataEnabled()) {
const ftBalances = await db.getFungibleTokenBalances({
stxAddress: accountIdentifier.address,
untilBlock: block.block_height,
});
for (const [ftAssetIdentifier, ftBalance] of ftBalances) {
const ftMetadata = await getValidatedFtMetadata(db, ftAssetIdentifier);
if (ftMetadata) {
balances.push({
value: ftBalance.balance.toString(),
currency: {
symbol: ftMetadata.symbol,
decimals: ftMetadata.decimals,
},
});
blockQuery = await db.getBlock({ hash: blockHash });
} else {
throw RosettaErrors[RosettaErrorsTypes.invalidBlockIdentifier];
}
}
}
const response: RosettaAccountBalanceResponse = {
block_identifier: {
index: block.block_height,
hash: block.block_hash,
},
balances: balances,
metadata: {
sequence_number: sequenceNumber,
},
};
if (!blockQuery.found) {
throw RosettaErrors[RosettaErrorsTypes.blockNotFound];
}
res.json(response);
const block = blockQuery.result;
if (blockIdentifier?.hash !== undefined && block.block_hash !== blockIdentifier.hash) {
throw RosettaErrors[RosettaErrorsTypes.invalidBlockHash];
}
const stxBalance = await db.getStxBalanceAtBlock(
accountIdentifier.address,
block.block_height
);
// return spendable balance (liquid) if no sub-account is specified
let balance = (stxBalance.balance - stxBalance.locked).toString();
const accountNonceQuery = await db.getAddressNonceAtBlock({
stxAddress: accountIdentifier.address,
blockIdentifier: { height: block.block_height },
});
const sequenceNumber = accountNonceQuery.found
? accountNonceQuery.result.possibleNextNonce
: 0;
const extra_metadata: any = {};
if (subAccountIdentifier !== undefined) {
switch (subAccountIdentifier.address) {
case RosettaConstants.StackedBalance:
const lockedBalance = stxBalance.locked;
balance = lockedBalance.toString();
break;
case RosettaConstants.SpendableBalance:
const spendableBalance = stxBalance.balance - stxBalance.locked;
balance = spendableBalance.toString();
break;
case RosettaConstants.VestingLockedBalance:
case RosettaConstants.VestingUnlockedBalance:
const stxVesting = await db.getTokenOfferingLocked(
accountIdentifier.address,
block.block_height
);
if (stxVesting.found) {
const vestingInfo = getVestingInfo(stxVesting.result);
balance = vestingInfo[subAccountIdentifier.address].toString();
extra_metadata[RosettaConstants.VestingSchedule] =
vestingInfo[RosettaConstants.VestingSchedule];
} else {
balance = '0';
}
break;
default:
throw RosettaErrors[RosettaErrorsTypes.invalidSubAccount];
}
}
const balances: RosettaAmount[] = [
{
value: balance,
currency: {
symbol: RosettaConstants.symbol,
decimals: RosettaConstants.decimals,
},
metadata: Object.keys(extra_metadata).length > 0 ? extra_metadata : undefined,
},
];
// Add Fungible Token balances.
if (isFtMetadataEnabled()) {
const ftBalances = await db.getFungibleTokenBalances({
stxAddress: accountIdentifier.address,
untilBlock: block.block_height,
});
for (const [ftAssetIdentifier, ftBalance] of ftBalances) {
const ftMetadata = await getValidatedFtMetadata(db, ftAssetIdentifier);
if (ftMetadata) {
balances.push({
value: ftBalance.balance.toString(),
currency: {
symbol: ftMetadata.symbol,
decimals: ftMetadata.decimals,
},
});
}
}
}
const response: RosettaAccountBalanceResponse = {
block_identifier: {
index: block.block_height,
hash: block.block_hash,
},
balances: balances,
metadata: {
sequence_number: sequenceNumber,
},
};
return response;
})
.catch(error => {
res.status(400).json(error);
})
.then(response => {
res.json(response);
});
})
);

View File

@@ -535,6 +535,7 @@ export function createRosettaConstructionRouter(db: PgStore, chainId: ChainID):
res.json(response);
} catch (error) {
console.error(error);
res.status(400).json(RosettaErrors[RosettaErrorsTypes.unknownError]);
}
})
);

View File

@@ -62,28 +62,39 @@ export function createRosettaMempoolRouter(db: PgStore, chainId: ChainID): expre
if (!has0xPrefix(tx_id)) {
tx_id = '0x' + tx_id;
}
const mempoolTxQuery = await db.getMempoolTx({ txId: tx_id, includeUnanchored: false });
await db
.sqlTransaction(async sql => {
const mempoolTxQuery = await db.getMempoolTx({
txId: tx_id,
includeUnanchored: false,
});
if (!mempoolTxQuery.found) {
res.status(500).json(RosettaErrors[RosettaErrorsTypes.transactionNotFound]);
return;
}
if (!mempoolTxQuery.found) {
throw RosettaErrors[RosettaErrorsTypes.transactionNotFound];
}
const operations = await getOperations(mempoolTxQuery.result, db, chainId);
const txMemo = parseTransactionMemo(mempoolTxQuery.result);
const transaction: RosettaTransaction = {
transaction_identifier: { hash: tx_id },
operations: operations,
};
if (txMemo) {
transaction.metadata = {
memo: txMemo,
};
}
const result: RosettaMempoolTransactionResponse = {
transaction: transaction,
};
res.json(result);
const operations = await getOperations(mempoolTxQuery.result, db, chainId);
const txMemo = parseTransactionMemo(mempoolTxQuery.result);
const transaction: RosettaTransaction = {
transaction_identifier: { hash: tx_id },
operations: operations,
};
if (txMemo) {
transaction.metadata = {
memo: txMemo,
};
}
const result: RosettaMempoolTransactionResponse = {
transaction: transaction,
};
return result;
})
.then(result => {
res.json(result);
})
.catch(error => {
res.status(400).json(error);
});
})
);

View File

@@ -13,6 +13,7 @@ import {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const middleware_version = require('../../../../package.json').version;
import {
RosettaBlock,
RosettaNetworkListResponse,
RosettaNetworkOptionsResponse,
RosettaNetworkStatusResponse,
@@ -48,15 +49,24 @@ export function createRosettaNetworkRouter(db: PgStore, chainId: ChainID): expre
return;
}
const block = await getRosettaBlockFromDataStore(db, false, chainId);
if (!block.found) {
res.status(500).json(RosettaErrors[RosettaErrorsTypes.blockNotFound]);
return;
}
const genesis = await getRosettaBlockFromDataStore(db, false, chainId, undefined, 1);
if (!genesis.found) {
res.status(500).json(RosettaErrors[RosettaErrorsTypes.blockNotFound]);
let block: RosettaBlock;
let genesis: RosettaBlock;
try {
const results = await db.sqlTransaction(async sql => {
const block = await getRosettaBlockFromDataStore(db, false, chainId);
if (!block.found) {
throw RosettaErrors[RosettaErrorsTypes.blockNotFound];
}
const genesis = await getRosettaBlockFromDataStore(db, false, chainId, undefined, 1);
if (!genesis.found) {
throw RosettaErrors[RosettaErrorsTypes.blockNotFound];
}
return { block: block.result, genesis: genesis.result };
});
block = results.block;
genesis = results.genesis;
} catch (error) {
res.status(400).json(error);
return;
}
@@ -75,21 +85,20 @@ export function createRosettaNetworkRouter(db: PgStore, chainId: ChainID): expre
return { peer_id: peerId };
});
const currentTipHeight = block.result.block_identifier.index;
const currentTipHeight = block.block_identifier.index;
const response: RosettaNetworkStatusResponse = {
current_block_identifier: {
index: block.result.block_identifier.index,
hash: block.result.block_identifier.hash,
index: block.block_identifier.index,
hash: block.block_identifier.hash,
},
current_block_timestamp: block.result.timestamp,
current_block_timestamp: block.timestamp,
genesis_block_identifier: {
index: genesis.result.block_identifier.index,
hash: genesis.result.block_identifier.hash,
index: genesis.block_identifier.index,
hash: genesis.block_identifier.hash,
},
peers,
};
const nodeInfo = await stacksCoreRpcClient.getInfo();
const referenceNodeTipHeight = nodeInfo.stacks_tip_height;
const synced = currentTipHeight === referenceNodeTipHeight;
@@ -100,7 +109,6 @@ export function createRosettaNetworkRouter(db: PgStore, chainId: ChainID): expre
synced: synced,
};
response.sync_status = status;
res.json(response);
})
);

View File

@@ -274,7 +274,9 @@ export function createSearchRouter(db: PgStore): express.Router {
const { term: rawTerm } = req.params;
const includeMetadata = booleanValueForParam(req, res, next, 'include_metadata');
const term = rawTerm.trim();
const searchResult = await performSearch(term, includeMetadata);
const searchResult = await db.sqlTransaction(async sql => {
return await performSearch(term, includeMetadata);
});
if (!searchResult.found) {
res.status(404);
}

View File

@@ -14,7 +14,7 @@ import {
} from '@stacks/stacks-blockchain-api-types';
import { parseLimitQuery, parsePagingQueryInput } from './../../pagination';
import { isFtMetadataEnabled, isNftMetadataEnabled } from '../../../token-metadata/helpers';
import { bufferToHexPrefixString, has0xPrefix, isValidPrincipal } from '../../../helpers';
import { has0xPrefix, isValidPrincipal } from '../../../helpers';
import { booleanValueForParam, isUnanchoredRequest } from '../../../api/query-helpers';
import { decodeClarityValueToRepr } from 'stacks-encoding-native-js';
import { getAssetEventTypeString, parseDbTx } from '../../controllers/db-controller';
@@ -118,44 +118,53 @@ export function createTokenRouter(db: PgStore): express.Router {
if (!has0xPrefix(value)) {
value = `0x${value}`;
}
const strValue = value;
const limit = parseTokenQueryLimit(req.query.limit ?? 50);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const chainTip = await db.getCurrentBlockHeight();
if (!chainTip.found) {
res.status(400).json({ error: `Unable to find a valid block to query` });
return;
}
const includeUnanchored = isUnanchoredRequest(req, res, next);
const includeTxMetadata = booleanValueForParam(req, res, next, 'tx_metadata');
const { results, total } = await db.getNftHistory({
assetIdentifier: assetIdentifier,
value: value,
limit: limit,
offset: offset,
blockHeight: includeUnanchored ? chainTip.result + 1 : chainTip.result,
includeTxMetadata: includeTxMetadata,
});
const parsedResults: NonFungibleTokenHistoryEvent[] = results.map(result => {
const parsedNftData = {
sender: result.nft_event.sender,
recipient: result.nft_event.recipient,
event_index: result.nft_event.event_index,
asset_event_type: getAssetEventTypeString(result.nft_event.asset_event_type_id),
};
if (includeTxMetadata && result.tx) {
return { ...parsedNftData, tx: parseDbTx(result.tx) };
}
return { ...parsedNftData, tx_id: result.nft_event.tx_id };
});
const response: NonFungibleTokenHistoryEventList = {
limit: limit,
offset: offset,
total: total,
results: parsedResults,
};
setETagCacheHeaders(res);
res.status(200).json(response);
await db
.sqlTransaction(async sql => {
const chainTip = await db.getCurrentBlockHeight();
if (!chainTip.found) {
throw { error: `Unable to find a valid block to query` };
}
const { results, total } = await db.getNftHistory({
assetIdentifier: assetIdentifier,
value: strValue,
limit: limit,
offset: offset,
blockHeight: includeUnanchored ? chainTip.result + 1 : chainTip.result,
includeTxMetadata: includeTxMetadata,
});
const parsedResults: NonFungibleTokenHistoryEvent[] = results.map(result => {
const parsedNftData = {
sender: result.nft_event.sender,
recipient: result.nft_event.recipient,
event_index: result.nft_event.event_index,
asset_event_type: getAssetEventTypeString(result.nft_event.asset_event_type_id),
};
if (includeTxMetadata && result.tx) {
return { ...parsedNftData, tx: parseDbTx(result.tx) };
}
return { ...parsedNftData, tx_id: result.nft_event.tx_id };
});
const response: NonFungibleTokenHistoryEventList = {
limit: limit,
offset: offset,
total: total,
results: parsedResults,
};
return response;
})
.then(response => {
setETagCacheHeaders(res);
res.status(200).json(response);
})
.catch(error => {
res.status(400).json(error);
});
})
);
@@ -173,44 +182,52 @@ export function createTokenRouter(db: PgStore): express.Router {
}
const limit = parseTokenQueryLimit(req.query.limit ?? 50);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const chainTip = await db.getCurrentBlockHeight();
if (!chainTip.found) {
res.status(400).json({ error: `Unable to find a valid block to query` });
return;
}
const includeUnanchored = isUnanchoredRequest(req, res, next);
const includeTxMetadata = booleanValueForParam(req, res, next, 'tx_metadata');
const { results, total } = await db.getNftMints({
assetIdentifier: assetIdentifier,
limit: limit,
offset: offset,
blockHeight: includeUnanchored ? chainTip.result + 1 : chainTip.result,
includeTxMetadata: includeTxMetadata,
});
const parsedResults: NonFungibleTokenMint[] = results.map(result => {
const parsedClarityValue = decodeClarityValueToRepr(result.nft_event.value);
const parsedNftData = {
recipient: result.nft_event.recipient,
event_index: result.nft_event.event_index,
value: {
hex: result.nft_event.value,
repr: parsedClarityValue,
},
};
if (includeTxMetadata && result.tx) {
return { ...parsedNftData, tx: parseDbTx(result.tx) };
}
return { ...parsedNftData, tx_id: result.nft_event.tx_id };
});
const response: NonFungibleTokenMintList = {
limit: limit,
offset: offset,
total: total,
results: parsedResults,
};
setETagCacheHeaders(res);
res.status(200).json(response);
await db
.sqlTransaction(async sql => {
const chainTip = await db.getCurrentBlockHeight();
if (!chainTip.found) {
throw { error: `Unable to find a valid block to query` };
}
const { results, total } = await db.getNftMints({
assetIdentifier: assetIdentifier,
limit: limit,
offset: offset,
blockHeight: includeUnanchored ? chainTip.result + 1 : chainTip.result,
includeTxMetadata: includeTxMetadata,
});
const parsedResults: NonFungibleTokenMint[] = results.map(result => {
const parsedClarityValue = decodeClarityValueToRepr(result.nft_event.value);
const parsedNftData = {
recipient: result.nft_event.recipient,
event_index: result.nft_event.event_index,
value: {
hex: result.nft_event.value,
repr: parsedClarityValue,
},
};
if (includeTxMetadata && result.tx) {
return { ...parsedNftData, tx: parseDbTx(result.tx) };
}
return { ...parsedNftData, tx_id: result.nft_event.tx_id };
});
const response: NonFungibleTokenMintList = {
limit: limit,
offset: offset,
total: total,
results: parsedResults,
};
return response;
})
.then(response => {
setETagCacheHeaders(res);
res.status(200).json(response);
})
.catch(error => {
res.status(400).json(error);
});
})
);

View File

@@ -115,22 +115,27 @@ export class WebSocketTransmitter {
if (this.channels.find(c => c.hasListeners('transaction', txId))) {
try {
// Look at the `txs` table first so we always prefer the confirmed transaction.
const txQuery = await getTxFromDataStore(this.db, {
txId: txId,
includeUnanchored: true,
});
if (txQuery.found) {
this.channels.forEach(c => c.send('transaction', txQuery.result));
} else {
// Tx is not yet confirmed, look at `mempool_txs`.
const mempoolTxs = await getMempoolTxsFromDataStore(this.db, {
txIds: [txId],
const result = await this.db.sqlTransaction(async sql => {
// Look at the `txs` table first so we always prefer the confirmed transaction.
const txQuery = await getTxFromDataStore(this.db, {
txId: txId,
includeUnanchored: true,
});
if (mempoolTxs.length > 0) {
this.channels.forEach(c => c.send('transaction', mempoolTxs[0]));
if (txQuery.found) {
return txQuery.result;
} else {
// Tx is not yet confirmed, look at `mempool_txs`.
const mempoolTxs = await getMempoolTxsFromDataStore(this.db, {
txIds: [txId],
includeUnanchored: true,
});
if (mempoolTxs.length > 0) {
return mempoolTxs[0];
}
}
});
if (result) {
this.channels.forEach(c => c.send('transaction', result));
}
} catch (error) {
logger.error(error);
@@ -197,23 +202,26 @@ export class WebSocketTransmitter {
if (this.channels.find(c => c.hasListeners('principalStxBalance', address))) {
try {
const stxBalanceResult = await this.db.getStxBalanceAtBlock(address, blockHeight);
const tokenOfferingLocked = await this.db.getTokenOfferingLocked(address, blockHeight);
const balance: AddressStxBalanceResponse = {
balance: stxBalanceResult.balance.toString(),
total_sent: stxBalanceResult.totalSent.toString(),
total_received: stxBalanceResult.totalReceived.toString(),
total_fees_sent: stxBalanceResult.totalFeesSent.toString(),
total_miner_rewards_received: stxBalanceResult.totalMinerRewardsReceived.toString(),
lock_tx_id: stxBalanceResult.lockTxId,
locked: stxBalanceResult.locked.toString(),
lock_height: stxBalanceResult.lockHeight,
burnchain_lock_height: stxBalanceResult.burnchainLockHeight,
burnchain_unlock_height: stxBalanceResult.burnchainUnlockHeight,
};
if (tokenOfferingLocked.found) {
balance.token_offering_locked = tokenOfferingLocked.result;
}
const balance = await this.db.sqlTransaction(async sql => {
const stxBalanceResult = await this.db.getStxBalanceAtBlock(address, blockHeight);
const tokenOfferingLocked = await this.db.getTokenOfferingLocked(address, blockHeight);
const balance: AddressStxBalanceResponse = {
balance: stxBalanceResult.balance.toString(),
total_sent: stxBalanceResult.totalSent.toString(),
total_received: stxBalanceResult.totalReceived.toString(),
total_fees_sent: stxBalanceResult.totalFeesSent.toString(),
total_miner_rewards_received: stxBalanceResult.totalMinerRewardsReceived.toString(),
lock_tx_id: stxBalanceResult.lockTxId,
locked: stxBalanceResult.locked.toString(),
lock_height: stxBalanceResult.lockHeight,
burnchain_lock_height: stxBalanceResult.burnchainLockHeight,
burnchain_unlock_height: stxBalanceResult.burnchainUnlockHeight,
};
if (tokenOfferingLocked.found) {
balance.token_offering_locked = tokenOfferingLocked.result;
}
return balance;
});
this.channels.forEach(c => c.send('principalStxBalance', address, balance));
} catch (error) {
logger.error(error);

View File

@@ -2,7 +2,7 @@ import { logError, parseArgBoolean, parsePort, stopwatch, timeout } from '../hel
import * as postgres from 'postgres';
import { isPgConnectionError } from './helpers';
export type PgSqlClient = postgres.Sql<any>;
export type PgSqlClient = postgres.Sql<any> | postgres.TransactionSql<any>;
/**
* The postgres server being used for a particular connection, transaction or query.

View File

@@ -99,6 +99,23 @@ import {
PgTokensNotificationPayload,
PgTxNotificationPayload,
} from './pg-notifier';
import { AsyncLocalStorage } from 'async_hooks';
/**
 * Unwraps `Promise` element types from an array type; non-array types pass through unchanged.
 * Mirrors the return-type shape of `postgres.js`'s `sql.begin`, which may resolve an array of
 * pending query results — used so `sqlTransaction` can surface the awaited callback result type.
 */
export type UnwrapPromiseArray<T> = T extends any[]
  ? {
      [k in keyof T]: T[k] extends Promise<infer R> ? R : T[k];
    }
  : T;
/**
 * Payload stored in `sqlTransactionContext` while an async context is executing inside a SQL
 * transaction.
 */
type SqlTransactionContext = {
  // `application_name` of the connection that opened the transaction (used by tests).
  usageName: string;
  // Transaction-scoped SQL client; all queries in this async context must use it.
  sql: PgSqlClient;
};
/**
* AsyncLocalStorage which determines if the current async context is running inside a SQL
* transaction.
*/
export const sqlTransactionContext = new AsyncLocalStorage<SqlTransactionContext>();
/**
* This is the main interface between the API and the Postgres database. It contains all methods that
@@ -107,15 +124,24 @@ import {
* happened in the `PgServer.primary` server (see `.env`).
*/
export class PgStore {
readonly sql: PgSqlClient;
readonly eventEmitter: PgStoreEventEmitter;
readonly notifier?: PgNotifier;
protected get closeTimeout(): number {
return parseInt(getPgConnectionEnvValue('CLOSE_TIMEOUT', PgServer.default) ?? '5');
}
private readonly _sql: PgSqlClient;
/**
* Getter for a SQL client. If used inside `sqlTransaction`, the scoped client within the current
* async context will be returned to guarantee transaction consistency.
*/
/**
 * SQL client accessor. When the current async context is running inside `sqlTransaction`,
 * the transaction-scoped client is returned so every query participates in that transaction;
 * otherwise the store's base connection pool client is returned.
 */
get sql(): PgSqlClient {
    const txContext = sqlTransactionContext.getStore();
    if (txContext !== undefined) {
      return txContext.sql;
    }
    return this._sql;
  }
constructor(sql: PgSqlClient, notifier: PgNotifier | undefined = undefined) {
this.sql = sql;
this._sql = sql;
this.notifier = notifier;
this.eventEmitter = new PgStoreEventEmitter();
}
@@ -139,6 +165,31 @@ export class PgStore {
await this.sql.end({ timeout: this.closeTimeout });
}
/**
* Start a SQL transaction. If any SQL client used within the callback was already scoped inside a
* `BEGIN` transaction, no new transaction will be opened. This flexibility allows us to avoid
* repeating code while making sure we don't arrive at SQL errors such as
* `WARNING: there is already a transaction in progress` which may cause result inconsistencies.
* @param callback - Callback with a scoped SQL client
* @param readOnly - If a `BEGIN` transaction should be marked as `READ ONLY`
* @returns Transaction results
*/
async sqlTransaction<T>(
    callback: (sql: PgSqlClient) => T | Promise<T>,
    readOnly = true
  ): Promise<UnwrapPromiseArray<T>> {
    // Reuse the scoped client if this async context is already inside a transaction,
    // so nested calls never emit a second `BEGIN`.
    const existingContext = sqlTransactionContext.getStore();
    if (existingContext) {
      return callback(existingContext.sql) as UnwrapPromiseArray<T>;
    }
    // No open transaction: begin one and expose its scoped connection through the
    // AsyncLocalStorage so nested `sqlTransaction`/`this.sql` calls pick it up.
    const usageName = this._sql.options.connection.application_name ?? '';
    const txMode = readOnly ? 'read only' : 'read write';
    return this._sql.begin(txMode, sql =>
      sqlTransactionContext.run({ usageName, sql }, () => callback(sql))
    );
  }
/**
* Get `application_name` for current connection (each connection has a unique PID)
*/
@@ -211,11 +262,11 @@ export class PgStore {
}
async getBlockWithMetadata<TWithTxs extends boolean, TWithMicroblocks extends boolean>(
blockIdentifer: BlockIdentifier,
blockIdentifier: BlockIdentifier,
metadata?: DbGetBlockWithMetadataOpts<TWithTxs, TWithMicroblocks>
): Promise<FoundOrNot<DbGetBlockWithMetadataResponse<TWithTxs, TWithMicroblocks>>> {
return await this.sql.begin('READ ONLY', async sql => {
const block = await this.getBlockInternal(sql, blockIdentifer);
return await this.sqlTransaction(async sql => {
const block = await this.getBlockInternal(sql, blockIdentifier);
if (!block.found) {
return { found: false };
}
@@ -391,7 +442,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<BlocksWithMetadata> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
// Get blocks with count.
const countQuery = await sql<{ count: number }[]>`
SELECT block_count AS count FROM chain_tip
@@ -499,7 +550,7 @@ export class PgStore {
}
async getBlockTxsRows(blockHash: string): Promise<FoundOrNot<DbTx[]>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const blockQuery = await this.getBlockInternal(sql, { hash: blockHash });
if (!blockQuery.found) {
throw new Error(`Could not find block by hash ${blockHash}`);
@@ -522,7 +573,7 @@ export class PgStore {
async getMicroblock(args: {
microblockHash: string;
}): Promise<FoundOrNot<{ microblock: DbMicroblock; txs: string[] }>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const result = await sql<MicroblockQueryResult[]>`
SELECT ${sql(MICROBLOCK_COLUMNS)}
FROM microblocks
@@ -549,7 +600,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<{ result: { microblock: DbMicroblock; txs: string[] }[]; total: number }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const countQuery = await sql<
{ total: number }[]
>`SELECT microblock_count AS total FROM chain_tip`;
@@ -600,7 +651,7 @@ export class PgStore {
}
async getUnanchoredTxs(): Promise<{ txs: DbTx[] }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
return this.getUnanchoredTxsInternal(sql);
});
}
@@ -609,7 +660,7 @@ export class PgStore {
stxAddress: string;
blockIdentifier: BlockIdentifier;
}): Promise<FoundOrNot<{ lastExecutedTxNonce: number | null; possibleNextNonce: number }>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const dbBlock = await this.getBlockInternal(sql, args.blockIdentifier);
if (!dbBlock.found) {
return { found: false };
@@ -641,7 +692,7 @@ export class PgStore {
possibleNextNonce: number;
detectedMissingNonces: number[];
}> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const executedTxNonce = await sql<{ nonce: number | null }[]>`
SELECT MAX(nonce) nonce
FROM txs
@@ -787,7 +838,7 @@ export class PgStore {
limit: number,
offset: number
): Promise<FoundOrNot<{ results: DbTx[]; total: number }>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const blockQuery = await this.getBlockInternal(sql, blockIdentifer);
if (!blockQuery.found) {
return { found: false };
@@ -948,7 +999,7 @@ export class PgStore {
if (args.txIds.length === 0) {
return [];
}
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const result = await sql<MempoolTxQueryResult[]>`
SELECT ${unsafeCols(sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
FROM mempool_txs
@@ -967,7 +1018,7 @@ export class PgStore {
includeUnanchored: boolean;
includePruned?: boolean;
}): Promise<FoundOrNot<DbMempoolTx>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const result = await sql<MempoolTxQueryResult[]>`
SELECT ${unsafeCols(sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
FROM mempool_txs
@@ -1011,7 +1062,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<{ results: DbMempoolTx[]; total: number }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const droppedStatuses = [
DbTxStatus.DroppedReplaceByFee,
DbTxStatus.DroppedReplaceAcrossFork,
@@ -1048,7 +1099,7 @@ export class PgStore {
}
async getMempoolStats({ lastBlockCount }: { lastBlockCount?: number }): Promise<DbMempoolStats> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
return await this.getMempoolStatsInternal({ sql, lastBlockCount });
});
}
@@ -1225,7 +1276,7 @@ export class PgStore {
recipientAddress?: string;
address?: string;
}): Promise<{ results: DbMempoolTx[]; total: number }> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
// If caller did not opt-in to unanchored tx data, then treat unanchored txs as pending mempool txs.
const unanchoredTxs: string[] = !includeUnanchored
? (await this.getUnanchoredTxsInternal(sql)).txs.map(tx => tx.tx_id)
@@ -1293,7 +1344,7 @@ export class PgStore {
txId: string;
includeUnanchored: boolean;
}): Promise<FoundOrNot<DbTx>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
const result = await sql<ContractTxQueryResult[]>`
SELECT ${unsafeCols(sql, [...TX_COLUMNS, abiColumn()])}
@@ -1336,7 +1387,7 @@ export class PgStore {
}): Promise<{ results: DbTx[]; total: number }> {
let totalQuery: { count: number }[];
let resultQuery: ContractTxQueryResult[];
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const maxHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
if (txTypeFilter.length === 0) {
totalQuery = await sql<{ count: number }[]>`
@@ -1382,7 +1433,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<{ results: DbEvent[] }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
if (args.txs.length === 0) return { results: [] };
// TODO: This hack has to be done because postgres.js can't figure out how to interpolate
// these `bytea` VALUES comparisons yet.
@@ -1506,7 +1557,7 @@ export class PgStore {
// To prevent that, all micro-orphaned events are excluded using `microblock_orphaned=false`.
// That means, unlike regular orphaned txs, if a micro-orphaned tx is never re-mined, the micro-orphaned event data
// will never be returned.
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const eventIndexStart = args.offset;
const eventIndexEnd = args.offset + args.limit - 1;
const stxLockResults = await sql<
@@ -1624,7 +1675,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<{ results: DbEvent[] }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const refValue = args.addressOrTxId.address ?? args.addressOrTxId.txId;
const isAddress = args.addressOrTxId.address !== undefined;
const emptyEvents = sql`SELECT NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL`;
@@ -1953,7 +2004,7 @@ export class PgStore {
stxAddress: string;
includeUnanchored: boolean;
}): Promise<DbStxBalance> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const blockQuery = await this.getCurrentBlockInternal(sql);
if (!blockQuery.found) {
throw new Error(`Could not find current block`);
@@ -1973,7 +2024,7 @@ export class PgStore {
}
async getStxBalanceAtBlock(stxAddress: string, blockHeight: number): Promise<DbStxBalance> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const chainTip = await this.getChainTip(sql);
const blockHeightToQuery =
blockHeight > chainTip.blockHeight ? chainTip.blockHeight : blockHeight;
@@ -2086,7 +2137,7 @@ export class PgStore {
}
| { includeUnanchored: boolean }
): Promise<{ stx: bigint; blockHeight: number }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
let atBlockHeight: number;
let atMatureBlockHeight: number;
if ('blockHeight' in args) {
@@ -2343,7 +2394,7 @@ export class PgStore {
}
async getTxStatus(txId: string): Promise<FoundOrNot<DbTxGlobalStatus>> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const chainResult = await sql<DbTxGlobalStatus[]>`
SELECT status, index_block_hash, microblock_hash
FROM txs
@@ -2669,7 +2720,7 @@ export class PgStore {
async searchHash({ hash }: { hash: string }): Promise<FoundOrNot<DbSearchResult>> {
// TODO(mb): add support for searching for microblock by hash
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const txQuery = await sql<ContractTxQueryResult[]>`
SELECT ${unsafeCols(sql, [...TX_COLUMNS, abiColumn()])}
FROM txs WHERE tx_id = ${hash} LIMIT 1
@@ -2728,7 +2779,7 @@ export class PgStore {
entity_id: principal,
},
} as const;
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
if (isContract) {
const contractMempoolTxResult = await sql<MempoolTxQueryResult[]>`
SELECT ${unsafeCols(sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
@@ -3120,7 +3171,7 @@ export class PgStore {
if (txIds.length === 0) {
return [];
}
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
const result = await sql<ContractTxQueryResult[]>`
SELECT ${unsafeCols(sql, [...TX_COLUMNS, abiColumn()])}
@@ -3140,7 +3191,7 @@ export class PgStore {
}
async getNamespaceList({ includeUnanchored }: { includeUnanchored: boolean }) {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<{ namespace_id: string }[]>`
SELECT DISTINCT ON (namespace_id) namespace_id
@@ -3166,7 +3217,7 @@ export class PgStore {
results: string[];
}> {
const offset = page * 100;
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<{ name: string }[]>`
SELECT DISTINCT ON (name) name
@@ -3190,7 +3241,7 @@ export class PgStore {
namespace: string;
includeUnanchored: boolean;
}): Promise<FoundOrNot<DbBnsNamespace & { index_block_hash: string }>> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<(DbBnsNamespace & { tx_id: string; index_block_hash: string })[]>`
SELECT DISTINCT ON (namespace_id) namespace_id, *
@@ -3224,7 +3275,7 @@ export class PgStore {
includeUnanchored: boolean;
chainId: ChainID;
}): Promise<FoundOrNot<DbBnsName & { index_block_hash: string }>> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
const nameZonefile = await sql<(DbBnsName & { tx_id: string; index_block_hash: string })[]>`
SELECT n.*, z.zonefile
@@ -3263,7 +3314,7 @@ export class PgStore {
zoneFileHash: string;
includeUnanchored: boolean;
}): Promise<FoundOrNot<DbBnsZoneFile>> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, {
includeUnanchored: args.includeUnanchored,
});
@@ -3319,7 +3370,7 @@ export class PgStore {
name: string;
includeUnanchored: boolean;
}): Promise<FoundOrNot<DbBnsZoneFile>> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
// Depending on the kind of name we got, use the correct table to pivot on canonical chain
// state to get the zonefile. We can't pivot on the `txs` table because some names/subdomains
@@ -3372,7 +3423,7 @@ export class PgStore {
includeUnanchored: boolean;
chainId: ChainID;
}): Promise<FoundOrNot<string[]>> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
// 1. Get subdomains owned by this address.
// These don't produce NFT events so we have to look directly at the `subdomains` table.
@@ -3456,7 +3507,7 @@ export class PgStore {
name: string;
includeUnanchored: boolean;
}): Promise<{ results: string[] }> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<{ fully_qualified_subdomain: string }[]>`
SELECT DISTINCT ON (fully_qualified_subdomain) fully_qualified_subdomain
@@ -3480,7 +3531,7 @@ export class PgStore {
includeUnanchored: boolean;
}) {
const offset = page * 100;
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<{ fully_qualified_subdomain: string }[]>`
SELECT DISTINCT ON (fully_qualified_subdomain) fully_qualified_subdomain
@@ -3498,7 +3549,7 @@ export class PgStore {
async getNamesList({ page, includeUnanchored }: { page: number; includeUnanchored: boolean }) {
const offset = page * 100;
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<{ name: string }[]>`
SELECT DISTINCT ON (name) name
@@ -3521,7 +3572,7 @@ export class PgStore {
subdomain: string;
includeUnanchored: boolean;
}): Promise<FoundOrNot<DbBnsSubdomain & { index_block_hash: string }>> {
const queryResult = await this.sql.begin('READ ONLY', async sql => {
const queryResult = await this.sqlTransaction(async sql => {
const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
return await sql<(DbBnsSubdomain & { tx_id: string; index_block_hash: string })[]>`
SELECT s.*, z.zonefile
@@ -3608,7 +3659,7 @@ export class PgStore {
}
async getUnlockedAddressesAtBlock(block: DbBlock): Promise<StxUnlockEvent[]> {
return await this.sql.begin('READ ONLY', async client => {
return await this.sqlTransaction(async client => {
return await this.internalGetUnlockedAccountsAtHeight(client, block);
});
}
@@ -3737,7 +3788,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<{ results: DbFungibleTokenMetadata[]; total: number }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const totalQuery = await sql<{ count: number }[]>`
SELECT COUNT(*)::integer
FROM ft_metadata
@@ -3774,7 +3825,7 @@ export class PgStore {
limit: number;
offset: number;
}): Promise<{ results: DbNonFungibleTokenMetadata[]; total: number }> {
return await this.sql.begin('READ ONLY', async sql => {
return await this.sqlTransaction(async sql => {
const totalQuery = await sql<{ count: number }[]>`
SELECT COUNT(*)::integer
FROM nft_metadata

View File

@@ -72,7 +72,7 @@ import {
validateZonefileHash,
} from './helpers';
import { PgNotifier } from './pg-notifier';
import { PgStore } from './pg-store';
import { PgStore, UnwrapPromiseArray } from './pg-store';
import {
connectPostgres,
getPgConnectionEnvValue,
@@ -140,6 +140,12 @@ export class PgWriteStore extends PgStore {
return store;
}
/**
 * Runs `callback` inside a read/write SQL transaction. Thin convenience wrapper over the
 * base `sqlTransaction`, which defaults to `READ ONLY` mode.
 * @param callback - Callback receiving the transaction-scoped SQL client
 * @returns Result of the callback once the transaction resolves
 */
async sqlWriteTransaction<T>(
    callback: (sql: PgSqlClient) => T | Promise<T>
  ): Promise<UnwrapPromiseArray<T>> {
    const readOnly = false;
    return super.sqlTransaction(callback, readOnly);
  }
async getChainTip(
sql: PgSqlClient,
useMaterializedView = true
@@ -197,7 +203,7 @@ export class PgWriteStore extends PgStore {
let garbageCollectedMempoolTxs: string[] = [];
let batchedTxData: DataStoreTxEventData[] = [];
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
const chainTip = await this.getChainTip(sql, false);
await this.handleReorg(sql, data.block, chainTip.blockHeight);
// If the incoming block is not of greater height than current chain tip, then store data as non-canonical.
@@ -488,7 +494,7 @@ export class PgWriteStore extends PgStore {
burnchainBlockHeight: number;
slotHolders: DbRewardSlotHolder[];
}): Promise<void> {
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
const existingSlotHolders = await sql<{ address: string }[]>`
UPDATE reward_slot_holders
SET canonical = false
@@ -543,7 +549,7 @@ export class PgWriteStore extends PgStore {
const txData: DataStoreTxEventData[] = [];
let dbMicroblocks: DbMicroblock[] = [];
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
// Sanity check: ensure incoming microblocks have a `parent_index_block_hash` that matches the API's
// current known canonical chain tip. We assume this holds true so incoming microblock data is always
// treated as being built off the current canonical anchor block.
@@ -884,7 +890,7 @@ export class PgWriteStore extends PgStore {
data: DbBnsSubdomain[]
): Promise<void> {
if (data.length == 0) return;
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
await this.updateBatchSubdomains(sql, [{ blockData, subdomains: data }]);
await this.updateBatchZonefiles(sql, [{ blockData, subdomains: data }]);
});
@@ -1007,7 +1013,7 @@ export class PgWriteStore extends PgStore {
}
async updateAttachments(attachments: DataStoreAttachmentData[]): Promise<void> {
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
// Each attachment will batch insert zonefiles for name and all subdomains that apply.
for (const attachment of attachments) {
const subdomainData: DataStoreAttachmentSubdomainData[] = [];
@@ -1189,7 +1195,7 @@ export class PgWriteStore extends PgStore {
burnchainBlockHeight: number;
rewards: DbBurnchainReward[];
}): Promise<void> {
return await this.sql.begin(async sql => {
return await this.sqlWriteTransaction(async sql => {
const existingRewards = await sql<
{
reward_recipient: string;
@@ -1283,7 +1289,7 @@ export class PgWriteStore extends PgStore {
async updateMempoolTxs({ mempoolTxs: txs }: { mempoolTxs: DbMempoolTx[] }): Promise<void> {
const updatedTxs: DbMempoolTx[] = [];
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
const chainTip = await this.getChainTip(sql, false);
for (const tx of txs) {
const values: MempoolTxInsertValues = {
@@ -1571,7 +1577,7 @@ export class PgWriteStore extends PgStore {
}
async updateFtMetadata(ftMetadata: DbFungibleTokenMetadata, dbQueueId: number): Promise<number> {
const length = await this.sql.begin(async sql => {
const length = await this.sqlWriteTransaction(async sql => {
const values: FtMetadataInsertValues = {
token_uri: ftMetadata.token_uri,
name: ftMetadata.name,
@@ -1605,7 +1611,7 @@ export class PgWriteStore extends PgStore {
nftMetadata: DbNonFungibleTokenMetadata,
dbQueueId: number
): Promise<number> {
const length = await this.sql.begin(async sql => {
const length = await this.sqlWriteTransaction(async sql => {
const values: NftMetadataInsertValues = {
token_uri: nftMetadata.token_uri,
name: nftMetadata.name,
@@ -2461,7 +2467,7 @@ export class PgWriteStore extends PgStore {
* @param unanchored - If this refresh is requested from a block or microblock
*/
async refreshNftCustody(txs: DataStoreTxEventData[], unanchored: boolean = false) {
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
const newNftEventCount = txs
.map(tx => tx.nftEvents.length)
.reduce((prev, cur) => prev + cur, 0);
@@ -2495,7 +2501,7 @@ export class PgWriteStore extends PgStore {
if (!this.isEventReplay) {
return;
}
await this.sql.begin(async sql => {
await this.sqlWriteTransaction(async sql => {
await this.refreshMaterializedView('nft_custody', sql, false);
await this.refreshMaterializedView('nft_custody_unanchored', sql, false);
await this.refreshMaterializedView('chain_tip', sql, false);

View File

@@ -132,6 +132,23 @@ export async function getOperations(
minerRewards?: DbMinerReward[],
events?: DbEvent[],
stxUnlockEvents?: StxUnlockEvent[]
): Promise<RosettaOperation[]> {
// Offline store does not support transactions
if (db instanceof PgStore) {
return await db.sqlTransaction(async sql => {
return await getOperationsInternal(tx, db, chainID, minerRewards, events, stxUnlockEvents);
});
}
return await getOperationsInternal(tx, db, chainID, minerRewards, events, stxUnlockEvents);
}
async function getOperationsInternal(
tx: DbTx | DbMempoolTx | BaseTx,
db: PgStore,
chainID: ChainID,
minerRewards?: DbMinerReward[],
events?: DbEvent[],
stxUnlockEvents?: StxUnlockEvent[]
): Promise<RosettaOperation[]> {
const operations: RosettaOperation[] = [];
const txType = getTxTypeString(tx.type_id);

View File

@@ -156,7 +156,7 @@ describe('BNS API tests', () => {
test('Namespace not found', async () => {
const query1 = await supertest(api.server).get(`/v1/namespaces/def/names`);
expect(query1.status).toBe(404);
expect(query1.status).toBe(400);
});
test('Validate: names returned length', async () => {
@@ -737,7 +737,7 @@ describe('BNS API tests', () => {
test('Failure: name info', async () => {
const query1 = await supertest(api.server).get(`/v1/names/testname`);
expect(query1.status).toBe(404);
expect(query1.status).toBe(400);
});
test('Success: fetching name info', async () => {

View File

@@ -436,7 +436,7 @@ describe('BNS integration tests', () => {
expect(query4.status).toBe(200);
const query5 = await supertest(api.server).get(`/v1/names/excluded.${name}.${namespace}`);
expect(query5.status).toBe(404);
expect(query5.status).toBe(400);
expect(query5.type).toBe('application/json');
// testing nameupdate 3

View File

@@ -1086,7 +1086,7 @@ describe('Rosetta API', () => {
},
};
const result = await supertest(api.server).post(`/rosetta/v1/account/balance/`).send(request);
expect(result.status).toBe(500);
expect(result.status).toBe(400);
expect(result.type).toBe('application/json');
const expectResponse = {

View File

@@ -104,7 +104,7 @@ describe('Rosetta offline API', () => {
const query1 = await supertest(api.address)
.post(`/rosetta/v1/network/status`)
.send({ network_identifier: { blockchain: 'stacks', network: 'testnet' } });
expect(query1.status).toBe(500);
expect(query1.status).toBe(400);
});
test('Fail: Offline - block - get latest', async () => {

View File

@@ -29,6 +29,7 @@ import { getPostgres, PgServer, PgSqlClient } from '../datastore/connection';
import { bnsNameCV, bufferToHexPrefixString, I32_MAX } from '../helpers';
import { ChainID } from '@stacks/transactions';
import { TestBlockBuilder } from '../test-utils/test-builders';
import { sqlTransactionContext } from '../datastore/pg-store';
function testEnvVars(
envVars: Record<string, string | undefined>,
@@ -296,6 +297,34 @@ describe('postgres datastore', () => {
);
});
test('postgres transaction connection integrity', async () => {
const usageName = 'stacks-blockchain-api:tests;datastore-crud';
const obj = db.sql;
expect(sqlTransactionContext.getStore()).toBeUndefined();
await db.sqlTransaction(async sql => {
// Transaction flag is open.
expect(sqlTransactionContext.getStore()?.usageName).toBe(usageName);
// New connection object.
const newObj = sql;
expect(obj).not.toEqual(newObj);
expect(sqlTransactionContext.getStore()?.sql).toEqual(newObj);
// Nested tx uses the same connection object.
await db.sqlTransaction(sql => {
expect(sqlTransactionContext.getStore()?.usageName).toBe(usageName);
expect(newObj).toEqual(sql);
});
// Getter returns the same connection object too.
expect(db.sql).toEqual(newObj);
});
// Back to normal.
expect(sqlTransactionContext.getStore()).toBeUndefined();
expect(db.sql).toEqual(obj);
});
test('pg address STX balances', async () => {
const dbBlock: DbBlock = {
block_hash: '0x9876',