@@ -69,7 +69,7 @@ import {
   StxUnlockEvent,
   TransferQueryResult,
 } from './common';
-import { connectPostgres, PgServer, PgSqlClient } from './connection';
+import { connectPostgres, getPgConnectionEnvValue, PgServer, PgSqlClient } from './connection';
 import {
   abiColumn,
   BLOCK_COLUMNS,
@@ -110,6 +110,9 @@ export class PgStore {
   readonly sql: PgSqlClient;
   readonly eventEmitter: PgStoreEventEmitter;
   readonly notifier?: PgNotifier;
+  protected get closeTimeout(): number {
+    return parseInt(getPgConnectionEnvValue('CLOSE_TIMEOUT', PgServer.default) ?? '5');
+  }
 
   constructor(sql: PgSqlClient, notifier: PgNotifier | undefined = undefined) {
     this.sql = sql;
@@ -133,7 +136,7 @@ export class PgStore {
 
   async close(): Promise<void> {
     await this.notifier?.close();
-    await this.sql.end();
+    await this.sql.end({ timeout: this.closeTimeout });
   }
 
   /**
@@ -211,7 +214,7 @@ export class PgStore {
     blockIdentifer: BlockIdentifier,
     metadata?: DbGetBlockWithMetadataOpts<TWithTxs, TWithMicroblocks>
   ): Promise<FoundOrNot<DbGetBlockWithMetadataResponse<TWithTxs, TWithMicroblocks>>> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const block = await this.getBlockInternal(sql, blockIdentifer);
       if (!block.found) {
         return { found: false };
@@ -377,24 +380,6 @@ export class PgStore {
     return { found: true, result: block } as const;
   }
 
-  async getBlocks({ limit, offset }: { limit: number; offset: number }) {
-    return await this.sql.begin(async sql => {
-      const total = await sql<{ count: number }[]>`
-        SELECT block_count AS count FROM chain_tip
-      `;
-      const results = await sql<BlockQueryResult[]>`
-        SELECT ${sql(BLOCK_COLUMNS)}
-        FROM blocks
-        WHERE canonical = true
-        ORDER BY block_height DESC
-        LIMIT ${limit}
-        OFFSET ${offset}
-      `;
-      const parsed = results.map(r => parseBlockQueryResult(r));
-      return { results: parsed, total: total[0].count } as const;
-    });
-  }
-
   /**
    * Returns Block information with metadata, including accepted and streamed microblocks hash
    * @returns `BlocksWithMetadata` object including list of Blocks with metadata and total count.
@@ -406,9 +391,21 @@ export class PgStore {
     limit: number;
     offset: number;
   }): Promise<BlocksWithMetadata> {
-    return await this.sql.begin(async sql => {
-      // get block list
-      const { results: blocks, total: block_count } = await this.getBlocks({ limit, offset });
+    return await this.sql.begin('READ ONLY', async sql => {
+      // Get blocks with count.
+      const countQuery = await sql<{ count: number }[]>`
+        SELECT block_count AS count FROM chain_tip
+      `;
+      const block_count = countQuery[0].count;
+      const blocksQuery = await sql<BlockQueryResult[]>`
+        SELECT ${sql(BLOCK_COLUMNS)}
+        FROM blocks
+        WHERE canonical = true
+        ORDER BY block_height DESC
+        LIMIT ${limit}
+        OFFSET ${offset}
+      `;
+      const blocks = blocksQuery.map(r => parseBlockQueryResult(r));
       const blockHashValues: string[] = [];
       const indexBlockHashValues: string[] = [];
       blocks.forEach(block => {
@@ -502,7 +499,7 @@ export class PgStore {
   }
 
   async getBlockTxsRows(blockHash: string): Promise<FoundOrNot<DbTx[]>> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const blockQuery = await this.getBlockInternal(sql, { hash: blockHash });
       if (!blockQuery.found) {
         throw new Error(`Could not find block by hash ${blockHash}`);
@@ -525,7 +522,7 @@ export class PgStore {
   async getMicroblock(args: {
     microblockHash: string;
   }): Promise<FoundOrNot<{ microblock: DbMicroblock; txs: string[] }>> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const result = await sql<MicroblockQueryResult[]>`
         SELECT ${sql(MICROBLOCK_COLUMNS)}
         FROM microblocks
@@ -552,7 +549,7 @@ export class PgStore {
     limit: number;
     offset: number;
   }): Promise<{ result: { microblock: DbMicroblock; txs: string[] }[]; total: number }> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const countQuery = await sql<
         { total: number }[]
       >`SELECT microblock_count AS total FROM chain_tip`;
@@ -603,7 +600,7 @@ export class PgStore {
   }
 
   async getUnanchoredTxs(): Promise<{ txs: DbTx[] }> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       return this.getUnanchoredTxsInternal(sql);
     });
   }
@@ -612,7 +609,7 @@ export class PgStore {
     stxAddress: string;
     blockIdentifier: BlockIdentifier;
   }): Promise<FoundOrNot<{ lastExecutedTxNonce: number | null; possibleNextNonce: number }>> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const dbBlock = await this.getBlockInternal(sql, args.blockIdentifier);
       if (!dbBlock.found) {
         return { found: false };
@@ -644,7 +641,7 @@ export class PgStore {
     possibleNextNonce: number;
     detectedMissingNonces: number[];
   }> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const executedTxNonce = await sql<{ nonce: number | null }[]>`
         SELECT MAX(nonce) nonce
         FROM txs
@@ -790,7 +787,7 @@ export class PgStore {
     limit: number,
     offset: number
   ): Promise<FoundOrNot<{ results: DbTx[]; total: number }>> {
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const blockQuery = await this.getBlockInternal(sql, blockIdentifer);
       if (!blockQuery.found) {
         return { found: false };
@@ -951,13 +948,13 @@ export class PgStore {
     if (args.txIds.length === 0) {
       return [];
     }
-    return this.sql.begin(async client => {
-      const result = await this.sql<MempoolTxQueryResult[]>`
-        SELECT ${unsafeCols(this.sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
+    return await this.sql.begin('READ ONLY', async sql => {
+      const result = await sql<MempoolTxQueryResult[]>`
+        SELECT ${unsafeCols(sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
         FROM mempool_txs
-        WHERE tx_id IN ${this.sql(args.txIds)}
+        WHERE tx_id IN ${sql(args.txIds)}
       `;
-      return await this.parseMempoolTransactions(result, client, args.includeUnanchored);
+      return await this.parseMempoolTransactions(result, sql, args.includeUnanchored);
     });
   }
 
@@ -969,8 +966,8 @@ export class PgStore {
     txId: string;
     includeUnanchored: boolean;
     includePruned?: boolean;
-  }) {
-    return this.sql.begin(async sql => {
+  }): Promise<FoundOrNot<DbMempoolTx>> {
+    return await this.sql.begin('READ ONLY', async sql => {
       const result = await sql<MempoolTxQueryResult[]>`
         SELECT ${unsafeCols(sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
         FROM mempool_txs
@@ -1014,7 +1011,7 @@ export class PgStore {
     limit: number;
     offset: number;
   }): Promise<{ results: DbMempoolTx[]; total: number }> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const droppedStatuses = [
         DbTxStatus.DroppedReplaceByFee,
         DbTxStatus.DroppedReplaceAcrossFork,
@@ -1051,7 +1048,7 @@ export class PgStore {
   }
 
   async getMempoolStats({ lastBlockCount }: { lastBlockCount?: number }): Promise<DbMempoolStats> {
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       return await this.getMempoolStatsInternal({ sql, lastBlockCount });
     });
   }
@@ -1228,7 +1225,7 @@ export class PgStore {
     recipientAddress?: string;
     address?: string;
   }): Promise<{ results: DbMempoolTx[]; total: number }> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       // If caller did not opt-in to unanchored tx data, then treat unanchored txs as pending mempool txs.
       const unanchoredTxs: string[] = !includeUnanchored
         ? (await this.getUnanchoredTxsInternal(sql)).txs.map(tx => tx.tx_id)
@@ -1289,8 +1286,14 @@ export class PgStore {
     return { found: true, result: { digest: result[0].digest } };
   }
 
-  async getTx({ txId, includeUnanchored }: { txId: string; includeUnanchored: boolean }) {
-    return this.sql.begin(async sql => {
+  async getTx({
+    txId,
+    includeUnanchored,
+  }: {
+    txId: string;
+    includeUnanchored: boolean;
+  }): Promise<FoundOrNot<DbTx>> {
+    return await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       const result = await sql<ContractTxQueryResult[]>`
         SELECT ${unsafeCols(sql, [...TX_COLUMNS, abiColumn()])}
@@ -1330,16 +1333,14 @@ export class PgStore {
     offset: number;
     txTypeFilter: TransactionType[];
     includeUnanchored: boolean;
-  }) {
+  }): Promise<{ results: DbTx[]; total: number }> {
     let totalQuery: { count: number }[];
     let resultQuery: ContractTxQueryResult[];
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const maxHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       if (txTypeFilter.length === 0) {
         totalQuery = await sql<{ count: number }[]>`
-          SELECT ${
-            includeUnanchored ? this.sql('tx_count_unanchored') : this.sql('tx_count')
-          } AS count
+          SELECT ${includeUnanchored ? sql('tx_count_unanchored') : sql('tx_count')} AS count
           FROM chain_tip
         `;
         resultQuery = await sql<ContractTxQueryResult[]>`
@@ -1373,15 +1374,15 @@ export class PgStore {
     });
   }
 
-  getTxListEvents(args: {
+  async getTxListEvents(args: {
     txs: {
       txId: string;
       indexBlockHash: string;
     }[];
     limit: number;
     offset: number;
-  }) {
-    return this.sql.begin(async sql => {
+  }): Promise<{ results: DbEvent[] }> {
+    return await this.sql.begin('READ ONLY', async sql => {
       if (args.txs.length === 0) return { results: [] };
       // TODO: This hack has to be done because postgres.js can't figure out how to interpolate
       // these `bytea` VALUES comparisons yet.
@@ -1494,13 +1495,18 @@ export class PgStore {
   /**
    * TODO investigate if this method needs be deprecated in favor of {@link getTransactionEvents}
    */
-  async getTxEvents(args: { txId: string; indexBlockHash: string; limit: number; offset: number }) {
+  async getTxEvents(args: {
+    txId: string;
+    indexBlockHash: string;
+    limit: number;
+    offset: number;
+  }): Promise<{ results: DbEvent[] }> {
     // Note: when this is used to fetch events for an unanchored microblock tx, the `indexBlockHash` is empty
     // which will cause the sql queries to also match micro-orphaned tx data (resulting in duplicate event results).
     // To prevent that, all micro-orphaned events are excluded using `microblock_orphaned=false`.
     // That means, unlike regular orphaned txs, if a micro-orphaned tx is never re-mined, the micro-orphaned event data
     // will never be returned.
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const eventIndexStart = args.offset;
       const eventIndexEnd = args.offset + args.limit - 1;
       const stxLockResults = await sql<
@@ -1617,8 +1623,8 @@ export class PgStore {
     eventTypeFilter: DbEventTypeId[];
     limit: number;
     offset: number;
-  }) {
-    return this.sql.begin(async sql => {
+  }): Promise<{ results: DbEvent[] }> {
+    return await this.sql.begin('READ ONLY', async sql => {
       const refValue = args.addressOrTxId.address ?? args.addressOrTxId.txId;
       const isAddress = args.addressOrTxId.address !== undefined;
       const emptyEvents = sql`SELECT NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL`;
@@ -1947,7 +1953,7 @@ export class PgStore {
     stxAddress: string;
     includeUnanchored: boolean;
   }): Promise<DbStxBalance> {
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const blockQuery = await this.getCurrentBlockInternal(sql);
       if (!blockQuery.found) {
         throw new Error(`Could not find current block`);
@@ -1967,7 +1973,7 @@ export class PgStore {
   }
 
   async getStxBalanceAtBlock(stxAddress: string, blockHeight: number): Promise<DbStxBalance> {
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const chainTip = await this.getChainTip(sql);
       const blockHeightToQuery =
         blockHeight > chainTip.blockHeight ? chainTip.blockHeight : blockHeight;
@@ -2079,8 +2085,8 @@ export class PgStore {
           blockHeight: number;
         }
       | { includeUnanchored: boolean }
-  ) {
-    return this.sql.begin(async sql => {
+  ): Promise<{ stx: bigint; blockHeight: number }> {
+    return await this.sql.begin('READ ONLY', async sql => {
       let atBlockHeight: number;
       let atMatureBlockHeight: number;
       if ('blockHeight' in args) {
@@ -2337,7 +2343,7 @@ export class PgStore {
   }
 
   async getTxStatus(txId: string): Promise<FoundOrNot<DbTxGlobalStatus>> {
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
      const chainResult = await sql<DbTxGlobalStatus[]>`
        SELECT status, index_block_hash, microblock_hash
        FROM txs
@@ -2663,7 +2669,7 @@ export class PgStore {
 
   async searchHash({ hash }: { hash: string }): Promise<FoundOrNot<DbSearchResult>> {
     // TODO(mb): add support for searching for microblock by hash
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const txQuery = await sql<ContractTxQueryResult[]>`
         SELECT ${unsafeCols(sql, [...TX_COLUMNS, abiColumn()])}
         FROM txs WHERE tx_id = ${hash} LIMIT 1
@@ -2722,7 +2728,7 @@ export class PgStore {
         entity_id: principal,
       },
     } as const;
-    return await this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       if (isContract) {
         const contractMempoolTxResult = await sql<MempoolTxQueryResult[]>`
           SELECT ${unsafeCols(sql, [...MEMPOOL_TX_COLUMNS, abiColumn('mempool_txs')])}
@@ -3114,7 +3120,7 @@ export class PgStore {
     if (txIds.length === 0) {
       return [];
     }
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       const result = await sql<ContractTxQueryResult[]>`
         SELECT ${unsafeCols(sql, [...TX_COLUMNS, abiColumn()])}
@@ -3134,7 +3140,7 @@ export class PgStore {
   }
 
   async getNamespaceList({ includeUnanchored }: { includeUnanchored: boolean }) {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<{ namespace_id: string }[]>`
         SELECT DISTINCT ON (namespace_id) namespace_id
@@ -3160,7 +3166,7 @@ export class PgStore {
     results: string[];
   }> {
     const offset = page * 100;
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<{ name: string }[]>`
         SELECT DISTINCT ON (name) name
@@ -3184,7 +3190,7 @@ export class PgStore {
     namespace: string;
     includeUnanchored: boolean;
   }): Promise<FoundOrNot<DbBnsNamespace & { index_block_hash: string }>> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<(DbBnsNamespace & { tx_id: string; index_block_hash: string })[]>`
         SELECT DISTINCT ON (namespace_id) namespace_id, *
@@ -3218,7 +3224,7 @@ export class PgStore {
     includeUnanchored: boolean;
     chainId: ChainID;
   }): Promise<FoundOrNot<DbBnsName & { index_block_hash: string }>> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       const nameZonefile = await sql<(DbBnsName & { tx_id: string; index_block_hash: string })[]>`
         SELECT n.*, z.zonefile
@@ -3257,7 +3263,7 @@ export class PgStore {
     zoneFileHash: string;
     includeUnanchored: boolean;
   }): Promise<FoundOrNot<DbBnsZoneFile>> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, {
         includeUnanchored: args.includeUnanchored,
       });
@@ -3313,7 +3319,7 @@ export class PgStore {
     name: string;
     includeUnanchored: boolean;
   }): Promise<FoundOrNot<DbBnsZoneFile>> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       // Depending on the kind of name we got, use the correct table to pivot on canonical chain
       // state to get the zonefile. We can't pivot on the `txs` table because some names/subdomains
@@ -3366,7 +3372,7 @@ export class PgStore {
     includeUnanchored: boolean;
     chainId: ChainID;
   }): Promise<FoundOrNot<string[]>> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       // 1. Get subdomains owned by this address.
       // These don't produce NFT events so we have to look directly at the `subdomains` table.
@@ -3450,7 +3456,7 @@ export class PgStore {
     name: string;
     includeUnanchored: boolean;
   }): Promise<{ results: string[] }> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<{ fully_qualified_subdomain: string }[]>`
         SELECT DISTINCT ON (fully_qualified_subdomain) fully_qualified_subdomain
@@ -3474,7 +3480,7 @@ export class PgStore {
     includeUnanchored: boolean;
   }) {
     const offset = page * 100;
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<{ fully_qualified_subdomain: string }[]>`
         SELECT DISTINCT ON (fully_qualified_subdomain) fully_qualified_subdomain
@@ -3492,7 +3498,7 @@ export class PgStore {
 
   async getNamesList({ page, includeUnanchored }: { page: number; includeUnanchored: boolean }) {
     const offset = page * 100;
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<{ name: string }[]>`
         SELECT DISTINCT ON (name) name
@@ -3515,7 +3521,7 @@ export class PgStore {
     subdomain: string;
     includeUnanchored: boolean;
   }): Promise<FoundOrNot<DbBnsSubdomain & { index_block_hash: string }>> {
-    const queryResult = await this.sql.begin(async sql => {
+    const queryResult = await this.sql.begin('READ ONLY', async sql => {
       const maxBlockHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
       return await sql<(DbBnsSubdomain & { tx_id: string; index_block_hash: string })[]>`
         SELECT s.*, z.zonefile
@@ -3602,7 +3608,7 @@ export class PgStore {
   }
 
   async getUnlockedAddressesAtBlock(block: DbBlock): Promise<StxUnlockEvent[]> {
-    return this.sql.begin(async client => {
+    return await this.sql.begin('READ ONLY', async client => {
       return await this.internalGetUnlockedAccountsAtHeight(client, block);
     });
   }
@@ -3724,14 +3730,14 @@ export class PgStore {
     }
   }
 
-  getFtMetadataList({
+  async getFtMetadataList({
     limit,
     offset,
   }: {
     limit: number;
     offset: number;
  }): Promise<{ results: DbFungibleTokenMetadata[]; total: number }> {
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const totalQuery = await sql<{ count: number }[]>`
         SELECT COUNT(*)::integer
         FROM ft_metadata
@@ -3761,14 +3767,14 @@ export class PgStore {
     });
   }
 
-  getNftMetadataList({
+  async getNftMetadataList({
     limit,
     offset,
   }: {
     limit: number;
     offset: number;
   }): Promise<{ results: DbNonFungibleTokenMetadata[]; total: number }> {
-    return this.sql.begin(async sql => {
+    return await this.sql.begin('READ ONLY', async sql => {
       const totalQuery = await sql<{ count: number }[]>`
         SELECT COUNT(*)::integer
         FROM nft_metadata