mirror of https://github.com/alexgo-io/stacks-blockchain-api.git (synced 2026-01-12 22:43:34 +08:00)
fix: remove live tsv append (#1315)
.env (3 changed lines)
```diff
@@ -35,9 +35,6 @@ PG_APPLICATION_NAME=stacks-blockchain-api
 # See https://node-postgres.com/api/pool
 # PG_CONNECTION_POOL_MAX=10
 
-# Enable to have stacks-node events streamed to a file while the application is running
-# STACKS_EXPORT_EVENTS_FILE=/tmp/stacks-events.tsv
-
 # If specified, controls the Stacks Blockchain API mode. The possible values are:
 # * `readonly`: Runs the API endpoints without an Event Server that listens to events from a node and
 #               writes them to the local database. The API will only read data from the PG database
```
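The pool comment in this hunk points at node-postgres. As a minimal sketch, assuming typical wiring rather than the API's actual connection code, `PG_CONNECTION_POOL_MAX` and `PG_APPLICATION_NAME` would feed a `pg` Pool like this:

```ts
// Sketch only: how node-postgres consumes a pool-size setting like the one above.
// The env var names come from the .env hunk; this wiring is an assumption, not the
// API's actual connection setup.
import { Pool } from 'pg';

const pool = new Pool({
  application_name: process.env['PG_APPLICATION_NAME'] ?? 'stacks-blockchain-api',
  max: Number(process.env['PG_CONNECTION_POOL_MAX'] ?? 10),
  // Other connection settings (host, database, user, ...) come from the usual PG*
  // env vars or an explicit connection string.
});

// Simple liveness check against the pool.
async function ping(): Promise<void> {
  const result = await pool.query('SELECT 1 AS ok');
  console.log(result.rows[0]);
}
```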
````diff
@@ -166,14 +166,6 @@ could be ran:
 time during import, but sacrifices some historical data. You can use this mode if you're mostly
 interested in running an API that prioritizes real time information.
 
-Alternatively, instead of performing the `export-events` command in step 1, an environmental
-variable can be set which enables events to be streamed to a file as they are received, while the
-application is running normally. To enable this feature, set the `STACKS_EXPORT_EVENTS_FILE` env var
-to the file path where events should be appended. Example:
-```
-STACKS_EXPORT_EVENTS_FILE=/tmp/stacks-node-events.tsv
-```
-
 # Client library
 
 You can use the Stacks Blockchain API Client library if you require a way to call the API via JavaScript or receive real-time updates via Websockets or Socket.io. Learn more [here](client/README.md).
````
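The removed README passage describes a TSV file with one received event per line. For reference, a minimal consumer sketch, assuming the column order (id, receive_timestamp, event_path, payload) used by the write path shown in the next hunk; the function name and `ExportedEvent` type are illustrative, not part of the API:

```ts
// Sketch only: reading back an exported events TSV. The column order mirrors the
// tsvRow written by the removed code in the next hunk; names and types here are
// assumptions for illustration.
import * as fs from 'fs';
import * as readline from 'readline';

interface ExportedEvent {
  id: string;
  receiveTimestamp: string;
  eventPath: string; // e.g. '/new_block'
  payload: string;   // raw JSON string as posted by the stacks-node
}

async function* readEventsTsv(filePath: string): AsyncGenerator<ExportedEvent> {
  const rl = readline.createInterface({
    input: fs.createReadStream(filePath),
    crlfDelay: Infinity,
  });
  for await (const line of rl) {
    if (!line) continue; // tolerate trailing blank lines
    const [id, receiveTimestamp, eventPath, payload] = line.split('\t');
    yield { id, receiveTimestamp, eventPath, payload };
  }
}
```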
```diff
@@ -182,12 +182,6 @@ export class PgWriteStore extends PgStore {
         `Unexpected row count ${insertResult.length} when storing event_observer_requests entry`
       );
     }
-    const exportEventsFile = process.env['STACKS_EXPORT_EVENTS_FILE'];
-    if (exportEventsFile) {
-      const result = insertResult[0];
-      const tsvRow = [result.id, result.receive_timestamp, result.event_path, result.payload];
-      fs.appendFileSync(exportEventsFile, tsvRow.join('\t') + '\n');
-    }
   }
 
   async update(data: DataStoreBlockUpdateData): Promise<void> {
```
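The removed block appended each stored event to the TSV synchronously inside the ingestion path. Since every event already lands in the event_observer_requests table, the same file can be regenerated offline. The sketch below is an assumption about how such an export could look: the table and column names come from the diff, but the connection handling and query are not the API's actual export-events implementation.

```ts
// Sketch only: regenerating the TSV from Postgres instead of appending on the
// ingestion path. Table and column names come from the removed code above; the
// rest is assumed. A real export of months of events should stream (e.g. COPY)
// rather than buffer all rows in memory.
import * as fs from 'fs';
import { Client } from 'pg';

async function exportEventsToTsv(connectionString: string, outFile: string): Promise<void> {
  const client = new Client({ connectionString });
  await client.connect();
  try {
    const result = await client.query(
      `SELECT id, receive_timestamp::text, event_path, payload::text
       FROM event_observer_requests
       ORDER BY id`
    );
    const out = fs.createWriteStream(outFile);
    for (const row of result.rows) {
      out.write([row.id, row.receive_timestamp, row.event_path, row.payload].join('\t') + '\n');
    }
    out.end();
  } finally {
    await client.end();
  }
}
```

Dropping the live fs.appendFileSync also keeps blocking file I/O out of the event-ingestion path, which is presumably part of the motivation for this fix.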
```diff
@@ -150,8 +150,6 @@ export async function importEventsFromTsv(
   // Set logger to only output for warnings/errors, otherwise the event replay will result
   // in the equivalent of months/years of API log output.
   logger.level = 'warn';
-  // Disable this feature so a redundant export file isn't created while importing from an existing one.
-  delete process.env['STACKS_EXPORT_EVENTS_FILE'];
   // The current import block height. Will be updated with every `/new_block` event.
   let blockHeight = 0;
   let isPruneFinished = false;
```
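The comments in this hunk describe a replay loop that tracks the current block height from `/new_block` events. A rough sketch of that loop's shape, reusing the field names from the TSV reader sketch earlier; the handler callback and the assumption that `/new_block` payloads carry a `block_height` field are illustrative, not the API's actual importEventsFromTsv:

```ts
// Sketch only: the general shape of the replay loop described by the comments above.
interface ReplayEvent {
  eventPath: string;
  payload: string;
}

async function replayEvents(
  events: AsyncIterable<ReplayEvent>,
  handler: (eventPath: string, payload: string) => Promise<void>
): Promise<number> {
  // The current import block height, updated with every `/new_block` event.
  let blockHeight = 0;
  for await (const event of events) {
    if (event.eventPath === '/new_block') {
      // Assumption: the stacks-node /new_block payload includes a block_height field.
      blockHeight = (JSON.parse(event.payload) as { block_height: number }).block_height;
    }
    await handler(event.eventPath, event.payload);
  }
  return blockHeight;
}
```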