From 14bb9f2c6d1367e56879a5f6b2faee018f73eeae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Chris=20Guimar=C3=A3es?=
Date: Wed, 3 May 2023 17:25:46 +0100
Subject: [PATCH] Logging migration to Pino library (#1630)

* feat: add new logger based on Pino library
* feat: changes on api/init.ts to use Pino logging library
* feat: changes on helpers.ts to use Pino logging library
* feat: changes on event-stream/event-server.ts to use Pino logging library
* feat: changes on API tests to use Pino logging library
* feat: changes on API codebase to use Pino logging library
* fix: lint
* feat: changes on logger
* feat: remove logError function
* feat: change logger.verbose to logger.debug
* fix: lint
* fix: first argument on logger should be an object
* feat: add logging information to ErrorRequestHandler
* fix: remove logger from ErrorRequestHandler
* fix: remove try-catch statement
  Errors should be handled by the ErrorRequestHandler middleware
* feat: disable automatic 'request completed' logging
---
 package-lock.json | 1075 +++++++++--------
 package.json | 5 +-
 src/api/controllers/cache-controller.ts | 9 +-
 src/api/controllers/db-controller.ts | 3 +-
 src/api/init.ts | 39 +-
 src/api/rosetta-validate.ts | 3 +-
 src/api/routes/address.ts | 63 +-
 src/api/routes/block.ts | 9 +-
 src/api/routes/bns/pricing.ts | 3 +-
 src/api/routes/burnchain.ts | 9 +-
 src/api/routes/contract.ts | 20 +-
 src/api/routes/core-node-rpc-proxy.ts | 20 +-
 src/api/routes/faucets.ts | 3 +-
 src/api/routes/microblock.ts | 9 +-
 src/api/routes/pox2.ts | 9 +-
 src/api/routes/stacking.ts | 9 +-
 src/api/routes/status.ts | 2 -
 src/api/routes/tokens/tokens.ts | 36 +-
 src/api/routes/tx.ts | 72 +-
 .../routes/ws/channels/socket-io-channel.ts | 15 +-
 src/api/routes/ws/channels/ws-rpc-channel.ts | 28 +-
 src/api/routes/ws/web-socket-transmitter.ts | 12 +-
 src/api/validate.ts | 3 +-
 src/btc-faucet.ts | 13 +-
 src/core-rpc/client.ts | 7 +-
 src/datastore/connection-legacy.ts | 9 +-
 src/datastore/connection.ts | 7 +-
 src/datastore/helpers.ts | 5 +-
 src/datastore/migrations.ts | 5 +-
 src/datastore/pg-notifier.ts | 6 +-
 src/datastore/pg-write-store.ts | 63 +-
 src/event-replay/event-replay.ts | 4 +-
 src/event-stream/event-server.ts | 81 +-
 src/event-stream/pox2-event-parsing.ts | 5 +-
 src/event-stream/reader.ts | 21 +-
 src/helpers.ts | 78 +-
 src/import-v1/index.ts | 26 +-
 src/index.ts | 11 +-
 src/inspector-util.ts | 41 +-
 src/logger.ts | 29 +
 src/rosetta-helpers.ts | 3 +-
 src/shutdown-handler.ts | 13 +-
 src/tests-bns/api.ts | 2 +-
 src/tests-bns/bns-integration-tests.ts | 7 +-
 src/tests-bns/event-server-tests.ts | 3 +-
 src/tests-bns/v1-import-tests.ts | 4 +-
 src/tests-btc-faucet/faucet-btc-tests.ts | 1 -
 src/tests-event-replay/import-export-tests.ts | 1 -
 .../raw-event-request-tests.ts | 2 -
 src/tests-rosetta/account-tests.ts | 4 +-
 src/tests-rosetta/block-tests.ts | 2 +-
 src/tests-tokens/strict-mode-tests.ts | 2 +-
 src/tests-tokens/tokens-metadata-tests.ts | 9 +-
 src/tests/address-tests.ts | 2 +-
 src/tests/block-tests.ts | 2 +-
 src/tests/burnchain-tests.ts | 2 +-
 src/tests/cache-control-tests.ts | 2 +-
 src/tests/mempool-tests.ts | 2 +-
 src/tests/microblock-tests.ts | 10 +-
 src/tests/other-tests.ts | 2 +-
 src/tests/search-tests.ts | 2 +-
 src/tests/smart-contract-tests.ts | 2 +-
 src/tests/socket-io-tests.ts | 1 -
 src/tests/token-tests.ts | 2 +-
 src/tests/tx-tests.ts | 2 +-
 src/tests/v2-proxy-tests.ts | 1 -
 src/tests/websocket-tests.ts | 1 -
 src/token-metadata/tokens-contract-handler.ts | 3 +-
 src/token-metadata/tokens-processor-queue.ts | 9 +-
 69 files changed, 890
insertions(+), 1075 deletions(-) create mode 100644 src/logger.ts diff --git a/package-lock.json b/package-lock.json index efd68a75..a9032bc3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -38,7 +38,6 @@ "evt": "1.10.1", "express": "4.17.3", "express-list-endpoints": "5.0.0", - "express-winston": "4.2.0", "getopts": "2.3.0", "http-proxy-middleware": "2.0.1", "jsonc-parser": "3.0.0", @@ -52,6 +51,9 @@ "pg": "8.7.1", "pg-copy-streams": "5.1.1", "pg-cursor": "2.7.1", + "pino": "8.11.0", + "pino-http": "8.3.3", + "pino-pretty": "10.0.0", "postgres": "3.3.1", "prom-client": "14.0.1", "rpc-bitcoin": "2.0.0", @@ -64,7 +66,6 @@ "ts-unused-exports": "7.0.3", "typescript": "4.6.2", "uuid": "8.3.2", - "winston": "3.6.0", "ws": "7.5.9", "zone-file": "2.0.0-beta.3" }, @@ -842,14 +843,6 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "engines": { - "node": ">=0.1.90" - } - }, "node_modules/@commitlint/cli": { "version": "9.1.2", "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-9.1.2.tgz", @@ -1086,16 +1079,6 @@ "node": ">=12" } }, - "node_modules/@dabh/diagnostics": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", - "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", - "dependencies": { - "colorspace": "1.1.x", - "enabled": "2.0.x", - "kuler": "^2.0.0" - } - }, "node_modules/@eslint/eslintrc": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", @@ -3262,6 +3245,17 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "devOptional": true }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -3535,11 +3529,6 @@ "node": ">=0.8" } }, - "node_modules/async": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", - "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==" - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -3551,6 +3540,14 @@ "integrity": "sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY=", "dev": true }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -3659,8 +3656,7 @@ "node_modules/balanced-match": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "devOptional": true + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/base-x": { "version": "3.0.9", @@ -4182,15 +4178,6 @@ "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", "dev": true }, - "node_modules/color": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", - "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", - "dependencies": { - "color-convert": "^1.9.3", - "color-string": "^1.6.0" - } - }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -4207,15 +4194,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/color-string": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.0.tgz", - "integrity": "sha512-9Mrz2AQLefkH1UvASKj6v6hj/7eWgjnT/cVsR8CumieLoT+g900exWeNogqtweI8dxloXN9BDQTYro1oWu/5CQ==", - "dependencies": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, "node_modules/color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", @@ -4225,27 +4203,10 @@ "color-support": "bin.js" } }, - "node_modules/color/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "node_modules/colorspace": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", - "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", - "dependencies": { - "color": "^3.1.3", - "text-hex": "1.0.x" - } + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" }, "node_modules/combined-stream": { "version": "1.0.8", @@ -4656,6 +4617,14 @@ "url": "https://opencollective.com/date-fns" } }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "engines": { + "node": "*" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -4962,11 +4931,6 @@ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, - 
"node_modules/enabled": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", - "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" - }, "node_modules/encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -4979,7 +4943,6 @@ "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, "dependencies": { "once": "^1.4.0" } @@ -5720,11 +5683,27 @@ "node": ">= 0.6" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/evt": { "version": "1.10.1", "resolved": "https://registry.npmjs.org/evt/-/evt-1.10.1.tgz", @@ -5829,85 +5808,6 @@ "node": ">=10" } }, - "node_modules/express-winston": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/express-winston/-/express-winston-4.2.0.tgz", - "integrity": "sha512-EMD74g63nVHi7pFleQw7KHCxiA1pjF5uCwbCfzGqmFxs9KvlDPIVS3cMGpULm6MshExMT9TjC3SqmRGB9kb7yw==", - "dependencies": { - "chalk": "^2.4.2", - "lodash": "^4.17.21" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "winston": ">=3.x <4" - } - }, - "node_modules/express-winston/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/express-winston/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/express-winston/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/express-winston/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "node_modules/express-winston/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/express-winston/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "engines": { - "node": ">=4" - } - }, - "node_modules/express-winston/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/express/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -5939,6 +5839,11 @@ "node >=0.6.0" ] }, + "node_modules/fast-copy": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-3.0.1.tgz", + "integrity": "sha512-Knr7NOtK3HWRYGtHoJrjkaWepqT8thIVGAwt0p0aUs1zqkAzXZV4vo9fFNwyb5fcqK1GKYFYxldQdIDVKhUAfA==" + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -5977,6 +5882,19 @@ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, + "node_modules/fast-redact": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.1.2.tgz", + "integrity": "sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, "node_modules/fastq": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", @@ -5995,11 +5913,6 @@ "bser": "2.1.1" } }, - "node_modules/fecha": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.2.tgz", - "integrity": "sha512-5rOQWkBVz3FnYWTi/ELZmq4CoK1Pb+xKNZWuJRsOwo0+8DrP43CrWJtyLVvb5U7z7ggE5llahfDbLjaVNzXVJQ==" - }, "node_modules/file-entry-cache": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", @@ -6115,11 +6028,6 @@ "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", "dev": true }, - "node_modules/fn.name": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", - "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" - }, "node_modules/follow-redirects": { "version": "1.14.9", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz", @@ -6216,8 +6124,7 @@ "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "devOptional": true + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "node_modules/fsevents": { "version": "2.3.2", @@ -6571,6 +6478,52 @@ "minimalistic-assert": "^1.0.1" } }, + "node_modules/help-me": { + "version": "4.2.0", + "resolved": 
"https://registry.npmjs.org/help-me/-/help-me-4.2.0.tgz", + "integrity": "sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==", + "dependencies": { + "glob": "^8.0.0", + "readable-stream": "^3.6.0" + } + }, + "node_modules/help-me/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/help-me/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/help-me/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/hmac-drbg": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", @@ -6879,7 +6832,6 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "devOptional": true, "dependencies": { "once": "^1.3.0", "wrappy": "1" @@ -7152,6 +7104,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, "engines": { "node": ">=8" }, @@ -7975,6 +7928,14 @@ "integrity": "sha1-o6vicYryQaKykE+EpiWXDzia4yo=", "dev": true }, + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "engines": { + "node": ">=10" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -8170,11 +8131,6 @@ "node": ">=6" } }, - "node_modules/kuler": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", - "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" - }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -8259,18 +8215,6 @@ "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=", "dev": true }, - "node_modules/logform": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/logform/-/logform-2.4.0.tgz", - "integrity": "sha512-CPSJw4ftjf517EhXZGGvTHHkYobo7ZCc0kvwUoOYcjfR2UVrI66RHj8MCrfAdEitdmFqbu2BYdYs8FHHZSb6iw==", - "dependencies": { - "@colors/colors": "1.5.0", - "fecha": "^4.2.0", - "ms": "^2.1.1", - "safe-stable-stringify": "^2.3.1", - "triple-beam": "^1.3.0" - } - }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ 
-9034,6 +8978,11 @@ "integrity": "sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ==", "dev": true }, + "node_modules/on-exit-leak-free": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.0.tgz", + "integrity": "sha512-VuCaZZAjReZ3vUwgOB8LxAosIurDiAW0s13rI1YwmaP++jvcxP77AWoQvenZebpCA2m8WC1/EosPYPMjnRAp/w==" + }, "node_modules/on-finished": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", @@ -9049,19 +8998,10 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "devOptional": true, "dependencies": { "wrappy": "1" } }, - "node_modules/one-time": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", - "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", - "dependencies": { - "fn.name": "1.x.x" - } - }, "node_modules/onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", @@ -9440,6 +9380,158 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pino": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-8.11.0.tgz", + "integrity": "sha512-Z2eKSvlrl2rH8p5eveNUnTdd4AjJk8tAsLkHYZQKGHP4WTh2Gi1cOSOs3eWPqaj+niS3gj4UkoreoaWgF3ZWYg==", + "dependencies": { + "atomic-sleep": "^1.0.0", + "fast-redact": "^3.1.1", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "v1.0.0", + "pino-std-serializers": "^6.0.0", + "process-warning": "^2.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^3.1.0", + "thread-stream": "^2.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.0.0.tgz", + "integrity": "sha512-c7vo5OpW4wIS42hUVcT5REsL8ZljsUfBjqV/e2sFxmFEFZiq1XLUp5EYLtuDH6PEHq9W1egWqRbnLUP5FuZmOA==", + "dependencies": { + "readable-stream": "^4.0.0", + "split2": "^4.0.0" + } + }, + "node_modules/pino-abstract-transport/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/pino-abstract-transport/node_modules/readable-stream": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.3.0.tgz", + "integrity": "sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/pino-abstract-transport/node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": 
"sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/pino-http": { + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/pino-http/-/pino-http-8.3.3.tgz", + "integrity": "sha512-p4umsNIXXVu95HD2C8wie/vXH7db5iGRpc+yj1/ZQ3sRtTQLXNjoS6Be5+eI+rQbqCRxen/7k/KSN+qiZubGDw==", + "dependencies": { + "get-caller-file": "^2.0.5", + "pino": "^8.0.0", + "pino-std-serializers": "^6.0.0", + "process-warning": "^2.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-10.0.0.tgz", + "integrity": "sha512-zKFjYXBzLaLTEAN1ayKpHXtL5UeRQC7R3lvhKe7fWs7hIVEjKGG/qIXwQt9HmeUp71ogUd/YcW+LmMwRp4KT6Q==", + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^3.0.0", + "fast-safe-stringify": "^2.1.1", + "help-me": "^4.0.1", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^1.0.0", + "pump": "^3.0.0", + "readable-stream": "^4.0.0", + "secure-json-parse": "^2.4.0", + "sonic-boom": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-pretty/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/pino-pretty/node_modules/readable-stream": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.3.0.tgz", + "integrity": "sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/pino-std-serializers": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-6.2.0.tgz", + "integrity": "sha512-IWgSzUL8X1w4BIWTwErRgtV8PyOGOOi60uqv0oKuS/fOA8Nco/OeI6lBuc4dyP8MMfdFwyHqTMcBIA7nDiqEqA==" + }, "node_modules/pirates": { "version": "4.0.5", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", @@ -9634,12 +9726,25 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "dev": true }, + "node_modules/process-warning": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-2.2.0.tgz", + "integrity": 
"sha512-/1WZ8+VQjR6avWOgHeEPd7SDQmFQ1B5mC1eRXsCm5TarlNmx/wCsa5GEaxGm05BORRtyG/Ex/3xq3TuRvq57qg==" + }, "node_modules/progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -9709,7 +9814,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -9773,6 +9877,11 @@ } ] }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==" + }, "node_modules/quick-lru": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", @@ -9919,6 +10028,14 @@ "node": ">=8.10.0" } }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "engines": { + "node": ">= 12.13.0" + } + }, "node_modules/redent": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", @@ -10380,6 +10497,11 @@ "node": ">=10" } }, + "node_modules/secure-json-parse": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", + "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==" + }, "node_modules/semver": { "version": "7.3.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", @@ -10539,19 +10661,6 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "devOptional": true }, - "node_modules/simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "dependencies": { - "is-arrayish": "^0.3.1" - } - }, - "node_modules/simple-swizzle/node_modules/is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - }, "node_modules/simple-update-notifier": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.0.7.tgz", @@ -10636,6 +10745,14 @@ "node": ">=10.0.0" } }, + "node_modules/sonic-boom": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.3.0.tgz", + "integrity": "sha512-LYxp34KlZ1a2Jb8ZQgFCK3niIHzibdwtwNUWKg0qQRzsDoJ3Gfgkf8KdBTFU3SkejDEIlWwnSnpVdOZIhFMl/g==", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, "node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -10729,14 +10846,6 @@ "node": ">=0.10.0" } }, - "node_modules/stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=", - "engines": { - "node": "*" - } - }, "node_modules/stack-utils": { "version": "2.0.5", "resolved": 
"https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", @@ -10913,7 +11022,6 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, "engines": { "node": ">=8" }, @@ -11115,17 +11223,20 @@ "node": ">=0.10" } }, - "node_modules/text-hex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", - "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" - }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, + "node_modules/thread-stream": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-2.3.0.tgz", + "integrity": "sha512-kaDqm1DET9pp3NXwR8382WHbnpXnRkN9xGN9dQt3B2+dmXiW8X1SOwmFOxAErEQ47ObhZ96J6yhZNXuyCOL7KA==", + "dependencies": { + "real-require": "^0.2.0" + } + }, "node_modules/throat": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.1.tgz", @@ -11263,11 +11374,6 @@ "node": ">=8" } }, - "node_modules/triple-beam": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", - "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" - }, "node_modules/ts-essentials": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-9.0.0.tgz", @@ -12016,39 +12122,6 @@ "which": "bin/which" } }, - "node_modules/winston": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.6.0.tgz", - "integrity": "sha512-9j8T75p+bcN6D00sF/zjFVmPp+t8KMPB1MzbbzYjeN9VWxdsYnTB40TkbNUEXAmILEfChMvAMgidlX64OG3p6w==", - "dependencies": { - "@dabh/diagnostics": "^2.0.2", - "async": "^3.2.3", - "is-stream": "^2.0.0", - "logform": "^2.4.0", - "one-time": "^1.0.0", - "readable-stream": "^3.4.0", - "safe-stable-stringify": "^2.3.1", - "stack-trace": "0.0.x", - "triple-beam": "^1.3.0", - "winston-transport": "^4.5.0" - }, - "engines": { - "node": ">= 12.0.0" - } - }, - "node_modules/winston-transport": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.5.0.tgz", - "integrity": "sha512-YpZzcUzBedhlTAfJg6vJDlyEai/IFMIVcaEZZyl3UXIl4gmqRpU7AE89AHLkbzLUsv0NVmw7ts+iztqKxxPW1Q==", - "dependencies": { - "logform": "^2.3.2", - "readable-stream": "^3.6.0", - "triple-beam": "^1.3.0" - }, - "engines": { - "node": ">= 6.4.0" - } - }, "node_modules/word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", @@ -12077,8 +12150,7 @@ "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "devOptional": true + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "node_modules/write-file-atomic": { "version": "3.0.3", @@ -12751,11 +12823,6 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, - "@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": 
"sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==" - }, "@commitlint/cli": { "version": "9.1.2", "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-9.1.2.tgz", @@ -12935,16 +13002,6 @@ "@cspotcode/source-map-consumer": "0.8.0" } }, - "@dabh/diagnostics": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", - "integrity": "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==", - "requires": { - "colorspace": "1.1.x", - "enabled": "2.0.x", - "kuler": "^2.0.0" - } - }, "@eslint/eslintrc": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", @@ -14705,6 +14762,14 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "devOptional": true }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "requires": { + "event-target-shim": "^5.0.0" + } + }, "accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -14904,11 +14969,6 @@ "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" }, - "async": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.3.tgz", - "integrity": "sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g==" - }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -14920,6 +14980,11 @@ "integrity": "sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY=", "dev": true }, + "atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==" + }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", @@ -15004,8 +15069,7 @@ "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "devOptional": true + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "base-x": { "version": "3.0.9", @@ -15395,30 +15459,6 @@ "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", "dev": true }, - "color": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/color/-/color-3.2.1.tgz", - "integrity": "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA==", - "requires": { - "color-convert": "^1.9.3", - "color-string": "^1.6.0" - }, - "dependencies": { - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - 
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - } - } - }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -15432,29 +15472,16 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "color-string": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.0.tgz", - "integrity": "sha512-9Mrz2AQLefkH1UvASKj6v6hj/7eWgjnT/cVsR8CumieLoT+g900exWeNogqtweI8dxloXN9BDQTYro1oWu/5CQ==", - "requires": { - "color-name": "^1.0.0", - "simple-swizzle": "^0.2.2" - } - }, "color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "optional": true }, - "colorspace": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", - "integrity": "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w==", - "requires": { - "color": "^3.1.3", - "text-hex": "1.0.x" - } + "colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" }, "combined-stream": { "version": "1.0.8", @@ -15784,6 +15811,11 @@ "integrity": "sha512-8d35hViGYx/QH0icHYCeLmsLmMUheMmTyV9Fcm6gvNwdw31yXXH+O85sOBJ+OLnLQMKZowvpKb6FgMIQjcpvQw==", "dev": true }, + "dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==" + }, "debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -16020,11 +16052,6 @@ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, - "enabled": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", - "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" - }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -16034,7 +16061,6 @@ "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, "requires": { "once": "^1.4.0" } @@ -16586,11 +16612,21 @@ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" }, + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" + }, "eventemitter3": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, + "events": { + "version": "3.3.0", + "resolved": 
"https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==" + }, "evt": { "version": "1.10.1", "resolved": "https://registry.npmjs.org/evt/-/evt-1.10.1.tgz", @@ -16697,66 +16733,6 @@ "resolved": "https://registry.npmjs.org/express-list-endpoints/-/express-list-endpoints-5.0.0.tgz", "integrity": "sha512-bjypzWA7AQ64VNLbQ3GqwDzLxqK1NIHFRJvFDb7wg0S7YwquI8l8XZsmF0yiEA22gN+p+G6+1KDcXxb6Dn3OaA==" }, - "express-winston": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/express-winston/-/express-winston-4.2.0.tgz", - "integrity": "sha512-EMD74g63nVHi7pFleQw7KHCxiA1pjF5uCwbCfzGqmFxs9KvlDPIVS3cMGpULm6MshExMT9TjC3SqmRGB9kb7yw==", - "requires": { - "chalk": "^2.4.2", - "lodash": "^4.17.21" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -16767,6 +16743,11 @@ "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, + "fast-copy": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-3.0.1.tgz", + "integrity": "sha512-Knr7NOtK3HWRYGtHoJrjkaWepqT8thIVGAwt0p0aUs1zqkAzXZV4vo9fFNwyb5fcqK1GKYFYxldQdIDVKhUAfA==" + }, "fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -16802,6 +16783,16 @@ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, + "fast-redact": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.1.2.tgz", + "integrity": "sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw==" + }, + 
"fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, "fastq": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", @@ -16820,11 +16811,6 @@ "bser": "2.1.1" } }, - "fecha": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.2.tgz", - "integrity": "sha512-5rOQWkBVz3FnYWTi/ELZmq4CoK1Pb+xKNZWuJRsOwo0+8DrP43CrWJtyLVvb5U7z7ggE5llahfDbLjaVNzXVJQ==" - }, "file-entry-cache": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", @@ -16917,11 +16903,6 @@ "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", "dev": true }, - "fn.name": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", - "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" - }, "follow-redirects": { "version": "1.14.9", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz", @@ -16982,8 +16963,7 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "devOptional": true + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "fsevents": { "version": "2.3.2", @@ -17236,6 +17216,45 @@ "minimalistic-assert": "^1.0.1" } }, + "help-me": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/help-me/-/help-me-4.2.0.tgz", + "integrity": "sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==", + "requires": { + "glob": "^8.0.0", + "readable-stream": "^3.6.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + } + }, + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, "hmac-drbg": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", @@ -17460,7 +17479,6 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "devOptional": true, "requires": { "once": "^1.3.0", "wrappy": "1" @@ -17648,7 +17666,8 @@ "is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==" + "integrity": 
"sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true }, "is-string": { "version": "1.0.7", @@ -18292,6 +18311,11 @@ "integrity": "sha1-o6vicYryQaKykE+EpiWXDzia4yo=", "dev": true }, + "joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==" + }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -18443,11 +18467,6 @@ "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", "dev": true }, - "kuler": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", - "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" - }, "leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -18523,18 +18542,6 @@ "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=", "dev": true }, - "logform": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/logform/-/logform-2.4.0.tgz", - "integrity": "sha512-CPSJw4ftjf517EhXZGGvTHHkYobo7ZCc0kvwUoOYcjfR2UVrI66RHj8MCrfAdEitdmFqbu2BYdYs8FHHZSb6iw==", - "requires": { - "@colors/colors": "1.5.0", - "fecha": "^4.2.0", - "ms": "^2.1.1", - "safe-stable-stringify": "^2.3.1", - "triple-beam": "^1.3.0" - } - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -19107,6 +19114,11 @@ "integrity": "sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ==", "dev": true }, + "on-exit-leak-free": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.0.tgz", + "integrity": "sha512-VuCaZZAjReZ3vUwgOB8LxAosIurDiAW0s13rI1YwmaP++jvcxP77AWoQvenZebpCA2m8WC1/EosPYPMjnRAp/w==" + }, "on-finished": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", @@ -19119,19 +19131,10 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "devOptional": true, "requires": { "wrappy": "1" } }, - "one-time": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", - "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", - "requires": { - "fn.name": "1.x.x" - } - }, "onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", @@ -19409,6 +19412,119 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" }, + "pino": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-8.11.0.tgz", + "integrity": "sha512-Z2eKSvlrl2rH8p5eveNUnTdd4AjJk8tAsLkHYZQKGHP4WTh2Gi1cOSOs3eWPqaj+niS3gj4UkoreoaWgF3ZWYg==", + "requires": { + "atomic-sleep": "^1.0.0", + "fast-redact": "^3.1.1", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "v1.0.0", + "pino-std-serializers": "^6.0.0", + "process-warning": "^2.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^3.1.0", + "thread-stream": "^2.0.0" + } + }, + 
"pino-abstract-transport": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-1.0.0.tgz", + "integrity": "sha512-c7vo5OpW4wIS42hUVcT5REsL8ZljsUfBjqV/e2sFxmFEFZiq1XLUp5EYLtuDH6PEHq9W1egWqRbnLUP5FuZmOA==", + "requires": { + "readable-stream": "^4.0.0", + "split2": "^4.0.0" + }, + "dependencies": { + "buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "readable-stream": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.3.0.tgz", + "integrity": "sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ==", + "requires": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10" + } + }, + "split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==" + } + } + }, + "pino-http": { + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/pino-http/-/pino-http-8.3.3.tgz", + "integrity": "sha512-p4umsNIXXVu95HD2C8wie/vXH7db5iGRpc+yj1/ZQ3sRtTQLXNjoS6Be5+eI+rQbqCRxen/7k/KSN+qiZubGDw==", + "requires": { + "get-caller-file": "^2.0.5", + "pino": "^8.0.0", + "pino-std-serializers": "^6.0.0", + "process-warning": "^2.0.0" + } + }, + "pino-pretty": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-10.0.0.tgz", + "integrity": "sha512-zKFjYXBzLaLTEAN1ayKpHXtL5UeRQC7R3lvhKe7fWs7hIVEjKGG/qIXwQt9HmeUp71ogUd/YcW+LmMwRp4KT6Q==", + "requires": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^3.0.0", + "fast-safe-stringify": "^2.1.1", + "help-me": "^4.0.1", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^1.0.0", + "pump": "^3.0.0", + "readable-stream": "^4.0.0", + "secure-json-parse": "^2.4.0", + "sonic-boom": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "readable-stream": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.3.0.tgz", + "integrity": "sha512-MuEnA0lbSi7JS8XM+WNJlWZkHAAdm7gETHdFK//Q/mChGyj2akEFtdLZh32jSdkWGbRwCW9pn6g3LWDdDeZnBQ==", + "requires": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10" + } + } + } + }, + "pino-std-serializers": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-6.2.0.tgz", + "integrity": "sha512-IWgSzUL8X1w4BIWTwErRgtV8PyOGOOi60uqv0oKuS/fOA8Nco/OeI6lBuc4dyP8MMfdFwyHqTMcBIA7nDiqEqA==" + }, "pirates": { "version": "4.0.5", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", @@ -19540,12 +19656,22 @@ } } }, + "process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": 
"sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==" + }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "dev": true }, + "process-warning": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-2.2.0.tgz", + "integrity": "sha512-/1WZ8+VQjR6avWOgHeEPd7SDQmFQ1B5mC1eRXsCm5TarlNmx/wCsa5GEaxGm05BORRtyG/Ex/3xq3TuRvq57qg==" + }, "progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -19600,7 +19726,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -19633,6 +19758,11 @@ "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", "dev": true }, + "quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==" + }, "quick-lru": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", @@ -19750,6 +19880,11 @@ "picomatch": "^2.2.1" } }, + "real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==" + }, "redent": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", @@ -20070,6 +20205,11 @@ "xmlchars": "^2.2.0" } }, + "secure-json-parse": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", + "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==" + }, "semver": { "version": "7.3.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", @@ -20203,21 +20343,6 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "devOptional": true }, - "simple-swizzle": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", - "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "requires": { - "is-arrayish": "^0.3.1" - }, - "dependencies": { - "is-arrayish": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" - } - } - }, "simple-update-notifier": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-1.0.7.tgz", @@ -20286,6 +20411,14 @@ "debug": "~4.3.1" } }, + "sonic-boom": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-3.3.0.tgz", + "integrity": "sha512-LYxp34KlZ1a2Jb8ZQgFCK3niIHzibdwtwNUWKg0qQRzsDoJ3Gfgkf8KdBTFU3SkejDEIlWwnSnpVdOZIhFMl/g==", + "requires": { + "atomic-sleep": "^1.0.0" + } + }, "source-map": { "version": "0.6.1", "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", @@ -20368,11 +20501,6 @@ "tweetnacl": "~0.14.0" } }, - "stack-trace": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", - "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" - }, "stack-utils": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", @@ -20513,8 +20641,7 @@ "strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" }, "superagent": { "version": "3.8.3", @@ -20675,17 +20802,20 @@ "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==", "dev": true }, - "text-hex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", - "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" - }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, + "thread-stream": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-2.3.0.tgz", + "integrity": "sha512-kaDqm1DET9pp3NXwR8382WHbnpXnRkN9xGN9dQt3B2+dmXiW8X1SOwmFOxAErEQ47ObhZ96J6yhZNXuyCOL7KA==", + "requires": { + "real-require": "^0.2.0" + } + }, "throat": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/throat/-/throat-6.0.1.tgz", @@ -20792,11 +20922,6 @@ "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", "dev": true }, - "triple-beam": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", - "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" - }, "ts-essentials": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/ts-essentials/-/ts-essentials-9.0.0.tgz", @@ -21348,33 +21473,6 @@ } } }, - "winston": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.6.0.tgz", - "integrity": "sha512-9j8T75p+bcN6D00sF/zjFVmPp+t8KMPB1MzbbzYjeN9VWxdsYnTB40TkbNUEXAmILEfChMvAMgidlX64OG3p6w==", - "requires": { - "@dabh/diagnostics": "^2.0.2", - "async": "^3.2.3", - "is-stream": "^2.0.0", - "logform": "^2.4.0", - "one-time": "^1.0.0", - "readable-stream": "^3.4.0", - "safe-stable-stringify": "^2.3.1", - "stack-trace": "0.0.x", - "triple-beam": "^1.3.0", - "winston-transport": "^4.5.0" - } - }, - "winston-transport": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.5.0.tgz", - "integrity": "sha512-YpZzcUzBedhlTAfJg6vJDlyEai/IFMIVcaEZZyl3UXIl4gmqRpU7AE89AHLkbzLUsv0NVmw7ts+iztqKxxPW1Q==", - "requires": { - "logform": "^2.3.2", - "readable-stream": "^3.6.0", - "triple-beam": "^1.3.0" - } - }, "word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", @@ -21394,8 +21492,7 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "devOptional": true + 
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" }, "write-file-atomic": { "version": "3.0.3", diff --git a/package.json b/package.json index 4aec26f8..0eb0651d 100644 --- a/package.json +++ b/package.json @@ -165,7 +165,6 @@ "evt": "1.10.1", "express": "4.17.3", "express-list-endpoints": "5.0.0", - "express-winston": "4.2.0", "getopts": "2.3.0", "http-proxy-middleware": "2.0.1", "jsonc-parser": "3.0.0", @@ -179,6 +178,9 @@ "pg": "8.7.1", "pg-copy-streams": "5.1.1", "pg-cursor": "2.7.1", + "pino": "8.11.0", + "pino-http": "8.3.3", + "pino-pretty": "10.0.0", "postgres": "3.3.1", "prom-client": "14.0.1", "rpc-bitcoin": "2.0.0", @@ -191,7 +193,6 @@ "ts-unused-exports": "7.0.3", "typescript": "4.6.2", "uuid": "8.3.2", - "winston": "3.6.0", "ws": "7.5.9", "zone-file": "2.0.0-beta.3" }, diff --git a/src/api/controllers/cache-controller.ts b/src/api/controllers/cache-controller.ts index 3b9642aa..b8d6e6d8 100644 --- a/src/api/controllers/cache-controller.ts +++ b/src/api/controllers/cache-controller.ts @@ -1,8 +1,9 @@ import { RequestHandler, Request, Response } from 'express'; import * as prom from 'prom-client'; -import { logger, normalizeHashString, sha256 } from '../../helpers'; +import { normalizeHashString, sha256 } from '../../helpers'; import { asyncHandler } from '../async-handler'; import { PgStore } from '../../datastore/pg-store'; +import { logger } from '../../logger'; const CACHE_OK = Symbol('cache_ok'); @@ -258,7 +259,7 @@ async function calculateETag( } return chainTip.result.microblockHash ?? chainTip.result.indexBlockHash; } catch (error) { - logger.error(`Unable to calculate chain_tip ETag: ${error}`); + logger.error(error, 'Unable to calculate chain_tip ETag'); return; } @@ -276,7 +277,7 @@ async function calculateETag( } return digest.result.digest; } catch (error) { - logger.error(`Unable to calculate mempool ETag: ${error}`); + logger.error(error, 'Unable to calculate mempool'); return; } @@ -299,7 +300,7 @@ async function calculateETag( ]; return sha256(elements.join(':')); } catch (error) { - logger.error(`Unable to calculate transaction ETag: ${error}`); + logger.error(error, 'Unable to calculate transaction'); return; } } diff --git a/src/api/controllers/db-controller.ts b/src/api/controllers/db-controller.ts index 8a63c4da..55dbac91 100644 --- a/src/api/controllers/db-controller.ts +++ b/src/api/controllers/db-controller.ts @@ -61,11 +61,12 @@ import { StxUnlockEvent, DbPox2Event, } from '../../datastore/common'; -import { unwrapOptional, FoundOrNot, logger, unixEpochToIso, EMPTY_HASH_256 } from '../../helpers'; +import { unwrapOptional, FoundOrNot, unixEpochToIso, EMPTY_HASH_256 } from '../../helpers'; import { serializePostCondition, serializePostConditionMode } from '../serializers/post-conditions'; import { getOperations, parseTransactionMemo } from '../../rosetta-helpers'; import { PgStore } from '../../datastore/pg-store'; import { Pox2EventName } from '../../pox-helpers'; +import { logger } from '../../logger'; export function parseTxTypeStrings(values: string[]): TransactionType[] { return values.map(v => { diff --git a/src/api/init.ts b/src/api/init.ts index ae9d27e9..8f5a7151 100644 --- a/src/api/init.ts +++ b/src/api/init.ts @@ -1,8 +1,6 @@ import { Server, createServer } from 'http'; import { Socket } from 'net'; import * as express from 'express'; -import * as expressWinston from 'express-winston'; -import * as winston from 'winston'; import { v4 as uuid } from 'uuid'; import * as cors from 'cors'; @@ -21,7 +19,7 @@ import { createRosettaMempoolRouter } 
from './routes/rosetta/mempool'; import { createRosettaBlockRouter } from './routes/rosetta/block'; import { createRosettaAccountRouter } from './routes/rosetta/account'; import { createRosettaConstructionRouter } from './routes/rosetta/construction'; -import { apiDocumentationUrl, isProdEnv, logError, logger, LogLevel, waiter } from '../helpers'; +import { apiDocumentationUrl, isProdEnv, waiter } from '../helpers'; import { InvalidRequestError } from '../errors'; import { createBurnchainRouter } from './routes/burnchain'; import { createBnsNamespacesRouter } from './routes/bns/namespaces'; @@ -48,6 +46,7 @@ import { WebSocketTransmitter } from './routes/ws/web-socket-transmitter'; import { createPox2EventsRouter } from './routes/pox2'; import { isPgConnectionError } from '../datastore/helpers'; import { createStackingRouter } from './routes/stacking'; +import { logger, loggerMiddleware } from '../logger'; export interface ApiServer { expressApp: express.Express; @@ -70,9 +69,8 @@ export async function startApiServer(opts: { serverHost?: string; /** If not specified, this is read from the STACKS_BLOCKCHAIN_API_PORT env var. */ serverPort?: number; - httpLogLevel?: LogLevel; }): Promise { - const { datastore, writeDatastore, chainId, serverHost, serverPort, httpLogLevel } = opts; + const { datastore, writeDatastore, chainId, serverHost, serverPort } = opts; try { const [branch, commit, tag] = fs.readFileSync('.git-info', 'utf-8').split('\n'); @@ -80,7 +78,7 @@ export async function startApiServer(opts: { API_VERSION.commit = commit; API_VERSION.tag = tag; } catch (error) { - logger.error(`Unable to read API version from .git-info`, error); + logger.error(error, `Unable to read API version from .git-info`); } const app = express(); @@ -150,15 +148,9 @@ export async function startApiServer(opts: { res.append('Access-Control-Expose-Headers', 'X-API-Version'); next(); }); - // Setup request logging - app.use( - expressWinston.logger({ - format: logger.format, - transports: logger.transports, - metaField: (null as unknown) as string, - statusLevels: true, - }) - ); + + // Common logger middleware for the whole API. 
+ app.use(loggerMiddleware); app.set('json spaces', 2); @@ -295,6 +287,7 @@ export async function startApiServer(opts: { app.use(((error, req, res, next) => { if (req.method === 'GET' && res.statusCode !== 200 && res.hasHeader('ETag')) { logger.error( + error, `Non-200 request has ETag: ${res.header('ETag')}, Cache-Control: ${res.header( 'Cache-Control' )}` @@ -302,6 +295,7 @@ export async function startApiServer(opts: { } if (error && res.headersSent && res.statusCode !== 200 && res.hasHeader('ETag')) { logger.error( + error, `A non-200 response with an error in request processing has ETag: ${res.header( 'ETag' )}, Cache-Control: ${res.header('Cache-Control')}` @@ -312,6 +306,7 @@ export async function startApiServer(opts: { } if (error && !res.headersSent) { if (error instanceof InvalidRequestError) { + logger.warn(error, error.message); res.status(error.status).json({ error: error.message }).end(); } else if (isPgConnectionError(error)) { res.status(503).json({ error: `The database service is unavailable` }).end(); @@ -327,18 +322,6 @@ export async function startApiServer(opts: { next(error); }) as express.ErrorRequestHandler); - app.use( - expressWinston.errorLogger({ - winstonInstance: logger as winston.Logger, - metaField: (null as unknown) as string, - blacklistedMetaFields: ['trace', 'os', 'process'], - skip: (_req, _res, error) => { - // Do not log errors for client 4xx responses - return error instanceof InvalidRequestError; - }, - }) - ); - // Store all the registered express routes for usage with metrics reporting routes = expressListEndpoints(app).map(endpoint => ({ path: endpoint.path, @@ -402,7 +385,7 @@ export async function startApiServer(opts: { logger.info('Closing WebSocket channels...'); ws.close(error => { if (error) { - logError('Failed to gracefully close WebSocket channels', error); + logger.error(error, 'Failed to gracefully close WebSocket channels'); reject(error); } else { logger.info('API WebSocket channels closed.'); diff --git a/src/api/rosetta-validate.ts b/src/api/rosetta-validate.ts index 79ba1efc..8638d956 100644 --- a/src/api/rosetta-validate.ts +++ b/src/api/rosetta-validate.ts @@ -1,5 +1,5 @@ import * as Ajv from 'ajv'; -import { hexToBuffer, logger, has0xPrefix, isValidC32Address, isValidPrincipal } from '../helpers'; +import { hexToBuffer, has0xPrefix, isValidC32Address, isValidPrincipal } from '../helpers'; import { RosettaConstants, RosettaErrors, @@ -12,6 +12,7 @@ import { import * as T from '@stacks/stacks-blockchain-api-types'; import { dereferenceSchema, getDocSchemaFile } from './validate'; import { ChainID } from '@stacks/transactions'; +import { logger } from '../logger'; export interface ValidSchema { valid: boolean; diff --git a/src/api/routes/address.ts b/src/api/routes/address.ts index 02c6056c..7bf883ab 100644 --- a/src/api/routes/address.ts +++ b/src/api/routes/address.ts @@ -16,7 +16,6 @@ import { isProdEnv, isValidC32Address, isValidPrincipal, - logger, } from '../../helpers'; import { getAssetEventTypeString, @@ -50,7 +49,7 @@ import { setETagCacheHeaders, } from '../controllers/cache-controller'; import { PgStore } from '../../datastore/pg-store'; -import { PgSqlClient } from '../../datastore/connection'; +import { logger } from '../../logger'; async function getBlockHeight( untilBlock: number | string | undefined, @@ -214,15 +213,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout const principal = req.params['principal']; validatePrincipal(principal); const untilBlock = parseUntilBlockQuery(req, 
res, next); - - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const response = await db.sqlTransaction(async sql => { @@ -326,14 +317,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout blockHeight = await getBlockHeight(untilBlock, req, res, next, db); } - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const { results: txResults, total } = await db.getAddressTxsWithAssetTransfers({ stxAddress: stxAddress, @@ -406,14 +390,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout validatePrincipal(stxAddress); const untilBlock = parseUntilBlockQuery(req, res, next); - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Event, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Event, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const response = await db.sqlTransaction(async sql => { @@ -466,14 +443,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout blockHeight = await getBlockHeight(untilBlock, req, res, next, db); } - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const { results, total } = await db.getInboundTransfers({ stxAddress, @@ -503,7 +473,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout setETagCacheHeaders(res); res.json(response); } catch (error) { - logger.error(`Unable to get inbound transfers for ${stxAddress}`, error); + logger.error(error, `Unable to get inbound transfers for ${stxAddress}`); throw error; } }) @@ -520,14 +490,7 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout const stxAddress = req.params['stx_address']; validatePrincipal(stxAddress); - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Event, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Event, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 
0); const includeUnanchored = isUnanchoredRequest(req, res, next); const untilBlock = parseUntilBlockQuery(req, res, next); @@ -577,16 +540,8 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout '/:address/mempool', mempoolCacheHandler, asyncHandler(async (req, res, next) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); - const address = req.params['address']; if (!isValidC32Address(address)) { throw new InvalidRequestError( @@ -594,7 +549,6 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout InvalidRequestErrorType.invalid_param ); } - const includeUnanchored = isUnanchoredRequest(req, res, next); const { results: txResults, total } = await db.getMempoolTxList({ offset, @@ -602,7 +556,6 @@ export function createAddressRouter(db: PgStore, chainId: ChainID): express.Rout address, includeUnanchored, }); - const results = txResults.map(tx => parseDbMempoolTx(tx)); const response: MempoolTransactionListResponse = { limit, offset, total, results }; if (!isProdEnv) { diff --git a/src/api/routes/block.ts b/src/api/routes/block.ts index 43954b50..c9bcc62d 100644 --- a/src/api/routes/block.ts +++ b/src/api/routes/block.ts @@ -16,14 +16,7 @@ export function createBlockRouter(db: PgStore): express.Router { '/', cacheHandler, asyncHandler(async (req, res) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Block, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Block, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const { results, total } = await getBlocksWithMetadata({ offset, limit, db }); diff --git a/src/api/routes/bns/pricing.ts b/src/api/routes/bns/pricing.ts index f45318ae..860e07f7 100644 --- a/src/api/routes/bns/pricing.ts +++ b/src/api/routes/bns/pricing.ts @@ -14,9 +14,10 @@ import { BnsGetNamePriceResponse, BnsGetNamespacePriceResponse, } from '@stacks/stacks-blockchain-api-types'; -import { isValidPrincipal, logger } from './../../../helpers'; +import { isValidPrincipal } from './../../../helpers'; import { PgStore } from '../../../datastore/pg-store'; import { getBnsContractID, GetStacksNetwork } from '../../../event-stream/bns/bns-helpers'; +import { logger } from '../../../logger'; export function createBnsPriceRouter(db: PgStore, chainId: ChainID): express.Router { const router = express.Router(); diff --git a/src/api/routes/burnchain.ts b/src/api/routes/burnchain.ts index 2d7043db..668af7b4 100644 --- a/src/api/routes/burnchain.ts +++ b/src/api/routes/burnchain.ts @@ -19,14 +19,7 @@ export function createBurnchainRouter(db: PgStore): express.Router { router.get( '/reward_slot_holders', asyncHandler(async (req, res) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Burnchain, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Burnchain, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 
0); const queryResults = await db.getBurnchainRewardSlotHolders({ offset, limit }); diff --git a/src/api/routes/contract.ts b/src/api/routes/contract.ts index 123e23ad..531511ec 100644 --- a/src/api/routes/contract.ts +++ b/src/api/routes/contract.ts @@ -12,15 +12,7 @@ export function createContractRouter(db: PgStore): express.Router { '/by_trait', asyncHandler(async (req, res, next) => { const trait_abi = parseTraitAbi(req, res, next); - - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Contract, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Contract, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const smartContracts = await db.getSmartContractByTrait({ trait: trait_abi, @@ -60,15 +52,7 @@ export function createContractRouter(db: PgStore): express.Router { '/:contract_id/events', asyncHandler(async (req, res) => { const { contract_id } = req.params; - - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Contract, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Contract, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const eventsQuery = await db.getSmartContractEvents({ contractId: contract_id, diff --git a/src/api/routes/core-node-rpc-proxy.ts b/src/api/routes/core-node-rpc-proxy.ts index 108a5424..c14f3a5e 100644 --- a/src/api/routes/core-node-rpc-proxy.ts +++ b/src/api/routes/core-node-rpc-proxy.ts @@ -1,7 +1,7 @@ import * as express from 'express'; import * as cors from 'cors'; import { createProxyMiddleware, Options, responseInterceptor } from 'http-proxy-middleware'; -import { logError, logger, parsePort, pipelineAsync, REPO_DIR } from '../../helpers'; +import { parsePort, pipelineAsync, REPO_DIR } from '../../helpers'; import { Agent } from 'http'; import * as fs from 'fs'; import * as path from 'path'; @@ -10,6 +10,7 @@ import * as chokidar from 'chokidar'; import * as jsoncParser from 'jsonc-parser'; import fetch, { RequestInit } from 'node-fetch'; import { PgStore } from '../../datastore/pg-store'; +import { logger } from '../../logger'; function GetStacksNodeProxyEndpoint() { // Use STACKS_CORE_PROXY env vars if available, otherwise fallback to `STACKS_CORE_RPC @@ -62,7 +63,7 @@ export function createCoreNodeRpcProxyRouter(db: PgStore): express.Router { ); } catch (error) { pathCacheOptions.clear(); - logger.error(`Error reading changes from ${proxyCacheControlFile}`, error); + logger.error(error, `Error reading changes from ${proxyCacheControlFile}`); } }; updatePathCacheOptions(); @@ -96,7 +97,7 @@ export function createCoreNodeRpcProxyRouter(db: PgStore): express.Router { try { fileContents = await fs.promises.readFile(filePath, { encoding: 'utf8' }); } catch (error) { - logError(`Error reading ${STACKS_API_EXTRA_TX_ENDPOINTS_FILE_ENV_VAR}: ${error}`, error); + logger.error(error, `Error reading ${STACKS_API_EXTRA_TX_ENDPOINTS_FILE_ENV_VAR}`); return false; } const endpoints = fileContents @@ -161,7 +162,7 @@ export function createCoreNodeRpcProxyRouter(db: PgStore): express.Router { first_broadcast_at_stacks_height: blockHeight, }); } catch (error) { - logError(`Error logging tx broadcast: ${error}`, error); + logger.error(error, 'Error logging tx broadcast'); } } @@ -203,7 +204,10 @@ export function createCoreNodeRpcProxyRouter(db: PgStore): 
express.Router { // to the extra endpoints are logged. results.slice(1).forEach(p => { if (p.status === 'rejected') { - logError(`Error during POST /v2/transaction to extra endpoint: ${p.reason}`, p.reason); + logger.error( + p.reason, + `Error during POST /v2/transaction to extra endpoint: ${p.reason}` + ); } else { if (!p.value.ok) { logger.warn( @@ -216,9 +220,9 @@ export function createCoreNodeRpcProxyRouter(db: PgStore): express.Router { // Proxy the result of the (non-extra) http response back to the client. const mainResult = results[0]; if (mainResult.status === 'rejected') { - logError( - `Error in primary POST /v2/transaction proxy: ${mainResult.reason}`, - mainResult.reason + logger.error( + mainResult.reason, + `Error in primary POST /v2/transaction proxy: ${mainResult.reason}` ); res.status(500).json({ error: mainResult.reason }); } else { diff --git a/src/api/routes/faucets.ts b/src/api/routes/faucets.ts index 179716c1..c3656c67 100644 --- a/src/api/routes/faucets.ts +++ b/src/api/routes/faucets.ts @@ -13,12 +13,13 @@ import { import { StacksNetwork, StacksTestnet } from '@stacks/network'; import { makeBtcFaucetPayment, getBtcBalance } from '../../btc-faucet'; import { DbFaucetRequestCurrency } from '../../datastore/common'; -import { intMax, logger, stxToMicroStx } from '../../helpers'; +import { intMax, stxToMicroStx } from '../../helpers'; import { testnetKeys, getStacksTestnetNetwork } from './debug'; import { StacksCoreRpcClient } from '../../core-rpc/client'; import { RunFaucetResponse } from '@stacks/stacks-blockchain-api-types'; import { PgWriteStore } from '../../datastore/pg-write-store'; import { BtcFaucetConfigError } from '../../errors'; +import { logger } from '../../logger'; export function getStxFaucetNetworks(): StacksNetwork[] { const networks: StacksNetwork[] = [getStacksTestnetNetwork()]; diff --git a/src/api/routes/microblock.ts b/src/api/routes/microblock.ts index 697a0937..f5a36334 100644 --- a/src/api/routes/microblock.ts +++ b/src/api/routes/microblock.ts @@ -21,14 +21,7 @@ export function createMicroblockRouter(db: PgStore): express.Router { router.get( '/', asyncHandler(async (req, res) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Microblock, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Microblock, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const query = await getMicroblocksFromDataStore({ db, offset, limit }); const response: MicroblockListResponse = { diff --git a/src/api/routes/pox2.ts b/src/api/routes/pox2.ts index 6ecb10e9..565fa6fe 100644 --- a/src/api/routes/pox2.ts +++ b/src/api/routes/pox2.ts @@ -21,14 +21,7 @@ export function createPox2EventsRouter(db: PgStore): express.Router { router.get( '/', asyncHandler(async (req, res) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Pox2Event, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Pox2Event, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 
0); const queryResults = await db.getPox2Events({ offset, limit }); diff --git a/src/api/routes/stacking.ts b/src/api/routes/stacking.ts index 5327490a..24e482b8 100644 --- a/src/api/routes/stacking.ts +++ b/src/api/routes/stacking.ts @@ -18,14 +18,7 @@ export function createStackingRouter(db: PgStore): express.Router { const poolPrincipal = req.params['pool_principal']; validatePrincipal(poolPrincipal); - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Stacker, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Stacker, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const afterBlock = getBlockHeightQueryParam('after_block', false, req, res, next) || 0; diff --git a/src/api/routes/status.ts b/src/api/routes/status.ts index 6a9d9572..e7f78efb 100644 --- a/src/api/routes/status.ts +++ b/src/api/routes/status.ts @@ -1,7 +1,5 @@ import * as express from 'express'; -import * as fs from 'fs'; import { ServerStatusResponse } from '@stacks/stacks-blockchain-api-types'; -import { logger } from '../../helpers'; import { getETagCacheHandler, setETagCacheHeaders } from '../controllers/cache-controller'; import { PgStore } from '../../datastore/pg-store'; import { API_VERSION } from '../init'; diff --git a/src/api/routes/tokens/tokens.ts b/src/api/routes/tokens/tokens.ts index 4e35d455..500e9491 100644 --- a/src/api/routes/tokens/tokens.ts +++ b/src/api/routes/tokens/tokens.ts @@ -53,14 +53,7 @@ export function createTokenRouter(db: PgStore): express.Router { } } - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const includeUnanchored = isUnanchoredRequest(req, res, next); const includeTxMetadata = booleanValueForParam(req, res, next, 'tx_metadata'); @@ -122,14 +115,7 @@ export function createTokenRouter(db: PgStore): express.Router { } const strValue = value; - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const includeUnanchored = isUnanchoredRequest(req, res, next); const includeTxMetadata = booleanValueForParam(req, res, next, 'tx_metadata'); @@ -191,14 +177,7 @@ export function createTokenRouter(db: PgStore): express.Router { return; } - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 
0); const includeUnanchored = isUnanchoredRequest(req, res, next); const includeTxMetadata = booleanValueForParam(req, res, next, 'tx_metadata'); @@ -259,14 +238,7 @@ export function createTokenRouter(db: PgStore): express.Router { return; } - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Token, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const { results, total } = await db.getFtMetadataList({ offset, limit }); diff --git a/src/api/routes/tx.ts b/src/api/routes/tx.ts index a1b76428..40e7969b 100644 --- a/src/api/routes/tx.ts +++ b/src/api/routes/tx.ts @@ -43,14 +43,7 @@ export function createTxRouter(db: PgStore): express.Router { '/', cacheHandler, asyncHandler(async (req, res, next) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const typeQuery = req.query.type; @@ -93,14 +86,7 @@ export function createTxRouter(db: PgStore): express.Router { } const txList: string[] = req.query.tx_id as string[]; - let eventLimit: number; - try { - eventLimit = getPagingQueryLimit(ResourceType.Tx, req.query['event_limit']); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const eventLimit = getPagingQueryLimit(ResourceType.Tx, req.query['event_limit']); const eventOffset = parsePagingQueryInput(req.query['event_offset'] ?? 0); const includeUnanchored = isUnanchoredRequest(req, res, next); txList.forEach(tx => validateRequestHexInput(tx)); @@ -125,14 +111,7 @@ export function createTxRouter(db: PgStore): express.Router { '/mempool', mempoolCacheHandler, asyncHandler(async (req, res, next) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); let addrParams: (string | undefined)[]; @@ -193,14 +172,7 @@ export function createTxRouter(db: PgStore): express.Router { '/mempool/dropped', mempoolCacheHandler, asyncHandler(async (req, res) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query.limit); const offset = parsePagingQueryInput(req.query.offset ?? 0); const { results: txResults, total } = await db.getDroppedTxs({ offset, @@ -227,14 +199,7 @@ export function createTxRouter(db: PgStore): express.Router { '/events', cacheHandler, asyncHandler(async (req, res, next) => { - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query['limit'], 100); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query['limit'], 100); const offset = parsePagingQueryInput(req.query['offset'] ?? 
0); const principalOrTxId = parseAddressOrTxId(req, res, next); @@ -263,14 +228,7 @@ export function createTxRouter(db: PgStore): express.Router { return res.redirect('/extended/v1/tx/0x' + tx_id + url.search); } - let eventLimit: number; - try { - eventLimit = getPagingQueryLimit(ResourceType.Tx, req.query['event_limit'], 100); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const eventLimit = getPagingQueryLimit(ResourceType.Tx, req.query['event_limit'], 100); const eventOffset = parsePagingQueryInput(req.query['event_offset'] ?? 0); const includeUnanchored = isUnanchoredRequest(req, res, next); validateRequestHexInput(tx_id); @@ -320,14 +278,7 @@ export function createTxRouter(db: PgStore): express.Router { asyncHandler(async (req, res) => { const { block_hash } = req.params; - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query['limit'], 200); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query['limit'], 200); const offset = parsePagingQueryInput(req.query['offset'] ?? 0); validateRequestHexInput(block_hash); const result = await db.getTxsFromBlock({ hash: block_hash }, limit, offset); @@ -360,14 +311,7 @@ export function createTxRouter(db: PgStore): express.Router { asyncHandler(async (req, res, next) => { const height = getBlockHeightPathParam(req, res, next); - let limit: number; - try { - limit = getPagingQueryLimit(ResourceType.Tx, req.query['limit']); - } catch (error: any) { - res.status(400).json({ error: error.message }); - return; - } - + const limit = getPagingQueryLimit(ResourceType.Tx, req.query['limit']); const offset = parsePagingQueryInput(req.query['offset'] ?? 
0); const result = await db.getTxsFromBlock({ height: height }, limit, offset); if (!result.found) { diff --git a/src/api/routes/ws/channels/socket-io-channel.ts b/src/api/routes/ws/channels/socket-io-channel.ts index c2e8917b..f8b0a7d4 100644 --- a/src/api/routes/ws/channels/socket-io-channel.ts +++ b/src/api/routes/ws/channels/socket-io-channel.ts @@ -9,7 +9,7 @@ import * as http from 'http'; import { Server as SocketIOServer } from 'socket.io'; import { Adapter } from 'socket.io-adapter'; import { isValidTxId } from '../../../../api/query-helpers'; -import { isProdEnv, isValidPrincipal, logger } from '../../../../helpers'; +import { isProdEnv, isValidPrincipal } from '../../../../helpers'; import { WebSocketPrometheus } from '../web-socket-prometheus'; import { ListenerType, @@ -22,6 +22,7 @@ import { getWsPingIntervalMs, getWsPingTimeoutMs, } from '../web-socket-transmitter'; +import { logger } from '../../../../logger'; const component = { component: 'socket-io' }; @@ -51,7 +52,7 @@ export class SocketIOChannel extends WebSocketChannel { this.io = io; io.on('connection', async socket => { - logger.verbose(`new connection: ${socket.id}`, component); + logger.debug(`new connection: ${socket.id}`, component); if (socket.handshake.headers['x-forwarded-for']) { this.prometheus?.connect(socket.handshake.headers['x-forwarded-for'] as string); } else { @@ -66,7 +67,7 @@ export class SocketIOChannel extends WebSocketChannel { } } socket.on('disconnect', reason => { - logger.verbose(`disconnected ${socket.id}: ${reason}`, component); + logger.debug(`disconnected ${socket.id}: ${reason}`, component); this.prometheus?.disconnect(socket); }); socket.on('subscribe', async (topic, callback) => { @@ -103,16 +104,16 @@ export class SocketIOChannel extends WebSocketChannel { const adapter = io.of('/').adapter; adapter.on('create-room', room => { - logger.verbose(`room created: ${room}`, component); + logger.debug(`room created: ${room}`, component); }); adapter.on('delete-room', room => { - logger.verbose(`room deleted: ${room}`, component); + logger.debug(`room deleted: ${room}`, component); }); adapter.on('join-room', (room, id) => { - logger.verbose(`socket ${id} joined room: ${room}`, component); + logger.debug(`socket ${id} joined room: ${room}`, component); }); adapter.on('leave-room', (room, id) => { - logger.verbose(`socket ${id} left room: ${room}`, component); + logger.debug(`socket ${id} left room: ${room}`, component); }); this.adapter = adapter; } diff --git a/src/api/routes/ws/channels/ws-rpc-channel.ts b/src/api/routes/ws/channels/ws-rpc-channel.ts index 8efc4c82..86e19384 100644 --- a/src/api/routes/ws/channels/ws-rpc-channel.ts +++ b/src/api/routes/ws/channels/ws-rpc-channel.ts @@ -4,7 +4,6 @@ import * as net from 'net'; import { isProdEnv, isValidPrincipal, - logError, normalizeHashString, resolveOrTimeout, } from '../../../../helpers'; @@ -43,6 +42,7 @@ import { NftEvent, } from '@stacks/stacks-blockchain-api-types'; import { getWsMessageTimeoutMs, getWsPingIntervalMs } from '../web-socket-transmitter'; +import { logger } from '../../../../logger'; type Subscription = | RpcTxUpdateSubscriptionParams @@ -587,7 +587,7 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('transaction'); } } catch (error) { - logError(`error sending websocket tx update for ${tx.tx_id}`, error); + logger.error(error, `error sending websocket tx update for ${tx.tx_id}`); } } @@ -616,7 +616,7 @@ export class WsRpcChannel extends WebSocketChannel { 
this.prometheus?.sendEvent('address-transaction'); } } catch (error) { - logError(`error sending websocket address tx updates to ${principal}`, error); + logger.error(error, `error sending websocket address tx updates to ${principal}`); } } @@ -641,7 +641,7 @@ export class WsRpcChannel extends WebSocketChannel { ); this.prometheus?.sendEvent('address-stx-balance'); } catch (error) { - logError(`error sending websocket stx balance update to ${principal}`, error); + logger.error(error, `error sending websocket stx balance update to ${principal}`); } } } @@ -661,7 +661,7 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('block'); } } catch (error) { - logError(`error sending websocket block updates`, error); + logger.error(error, `error sending websocket block updates`); } } @@ -680,7 +680,7 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('microblock'); } } catch (error) { - logError(`error sending websocket microblock updates`, error); + logger.error(error, `error sending websocket microblock updates`); } } @@ -699,7 +699,7 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('mempool'); } } catch (error) { - logError(`error sending websocket mempool updates`, error); + logger.error(error, `error sending websocket mempool updates`); } } @@ -718,7 +718,7 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('nft-event'); } } catch (error) { - logError(`error sending websocket nft-event updates`, error); + logger.error(error, `error sending websocket nft-event updates`); } } @@ -738,9 +738,9 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('nft-event'); } } catch (error) { - logError( - `error sending websocket nft-asset-event updates for ${assetIdentifier} ${value}`, - error + logger.error( + error, + `error sending websocket nft-asset-event updates for ${assetIdentifier} ${value}` ); } } @@ -763,9 +763,9 @@ export class WsRpcChannel extends WebSocketChannel { this.prometheus?.sendEvent('nft-event'); } } catch (error) { - logError( - `error sending websocket nft-collection-event updates for ${assetIdentifier}`, - error + logger.error( + error, + `error sending websocket nft-collection-event updates for ${assetIdentifier}` ); } } diff --git a/src/api/routes/ws/web-socket-transmitter.ts b/src/api/routes/ws/web-socket-transmitter.ts index d9090250..4e9dee45 100644 --- a/src/api/routes/ws/web-socket-transmitter.ts +++ b/src/api/routes/ws/web-socket-transmitter.ts @@ -13,7 +13,7 @@ import { ListenerType, WebSocketChannel, WebSocketPayload } from './web-socket-c import { SocketIOChannel } from './channels/socket-io-channel'; import { WsRpcChannel } from './channels/ws-rpc-channel'; import { parseNftEvent } from '../../../datastore/helpers'; -import { logger } from '../../../helpers'; +import { logger } from '../../../logger'; export function getWsPingIntervalMs(): number { return parseInt(process.env['STACKS_API_WS_PING_INTERVAL'] ?? 
'5') * 1000; @@ -58,27 +58,27 @@ export class WebSocketTransmitter { this.db.eventEmitter.addListener('blockUpdate', blockHash => this.queue .add(() => this.blockUpdate(blockHash)) - .catch(error => logger.error(`WebSocketTransmitter blockUpdate error: ${error}`)) + .catch(error => logger.error(error, 'WebSocketTransmitter blockUpdate error')) ); this.db.eventEmitter.addListener('microblockUpdate', microblockHash => this.queue .add(() => this.microblockUpdate(microblockHash)) - .catch(error => logger.error(`WebSocketTransmitter microblockUpdate error: ${error}`)) + .catch(error => logger.error(error, 'WebSocketTransmitter microblockUpdate error')) ); this.db.eventEmitter.addListener('nftEventUpdate', (txId, eventIndex) => this.queue .add(() => this.nftEventUpdate(txId, eventIndex)) - .catch(error => logger.error(`WebSocketTransmitter nftEventUpdate error: ${error}`)) + .catch(error => logger.error(error, 'WebSocketTransmitter nftEventUpdate error')) ); this.db.eventEmitter.addListener('txUpdate', txId => this.queue .add(() => this.txUpdate(txId)) - .catch(error => logger.error(`WebSocketTransmitter txUpdate error: ${error}`)) + .catch(error => logger.error(error, 'WebSocketTransmitter txUpdate error')) ); this.db.eventEmitter.addListener('addressUpdate', (address, blockHeight) => this.queue .add(() => this.addressUpdate(address, blockHeight)) - .catch(error => logger.error(`WebSocketTransmitter addressUpdate error: ${error}`)) + .catch(error => logger.error(error, 'WebSocketTransmitter addressUpdate error')) ); this.channels.push(new SocketIOChannel(this.server)); diff --git a/src/api/validate.ts b/src/api/validate.ts index 06203b9b..1b96e0ce 100644 --- a/src/api/validate.ts +++ b/src/api/validate.ts @@ -1,7 +1,8 @@ import * as path from 'path'; import * as Ajv from 'ajv'; import * as RefParser from '@apidevtools/json-schema-ref-parser'; -import { logger, getOrAddAsync, REPO_DIR } from '../helpers'; +import { getOrAddAsync, REPO_DIR } from '../helpers'; +import { logger } from '../logger'; const derefSchemaCache: Map = new Map(); export async function dereferenceSchema(schemaFilePath: string): Promise { diff --git a/src/btc-faucet.ts b/src/btc-faucet.ts index 887a891f..ee8bdd5b 100644 --- a/src/btc-faucet.ts +++ b/src/btc-faucet.ts @@ -2,10 +2,11 @@ import { RPCClient } from 'rpc-bitcoin'; import * as btc from 'bitcoinjs-lib'; import * as ecc from 'tiny-secp256k1'; import * as Bluebird from 'bluebird'; -import { parsePort, time, logger, logError } from './helpers'; +import { parsePort, time } from './helpers'; import * as coinselect from 'coinselect'; import { ECPair, ECPairInterface, validateSigFunction } from './ec-helpers'; import { BtcFaucetConfigError } from './errors'; +import { logger } from './logger'; function getFaucetPk(): string { const { BTC_FAUCET_PK } = process.env; @@ -92,10 +93,10 @@ export async function getBtcBalance(network: btc.Network, address: string) { async function getTxOutSet(client: RPCClient, address: string): Promise { const txOutSet: TxOutSet = await time( () => client.scantxoutset({ action: 'start', scanobjects: [`addr(${address})`] }), - ms => logger.verbose(`scantxoutset for ${address} took ${ms} ms`) + ms => logger.debug(`scantxoutset for ${address} took ${ms} ms`) ); if (!txOutSet.success) { - logError(`WARNING: scantxoutset did not immediately complete -- polling for progress...`); + logger.error('scantxoutset did not immediately complete -- polling for progress...'); let scanProgress = true; do { scanProgress = await client.scantxoutset({ @@ -137,7 
+138,7 @@ async function getRawTransactions(client: RPCClient, txIds: string[]): Promise logger.verbose(`batch getrawtransaction for ${txIds.length} txs took ${ms} ms`) + ms => logger.debug(`batch getrawtransaction for ${txIds.length} txs took ${ms} ms`) ); return batchRawTxRes; } @@ -146,7 +147,7 @@ async function getSpendableUtxos(client: RPCClient, address: string): Promise client.getrawmempool(), - ms => logger.verbose(`getrawmempool took ${ms} ms`) + ms => logger.debug(`getrawmempool took ${ms} ms`) ); const rawTxs = await getRawTransactions(client, mempoolTxIds); const spentUtxos = rawTxs.map(tx => tx.vin).flat(); @@ -224,7 +225,7 @@ export async function makeBtcFaucetPayment( const txId = tx.getId(); const sendTxResult: string = await time( () => client.sendrawtransaction({ hexstring: txHex }), - ms => logger.verbose(`sendrawtransaction took ${ms}`) + ms => logger.debug(`sendrawtransaction took ${ms}`) ); if (sendTxResult !== txId) { diff --git a/src/core-rpc/client.ts b/src/core-rpc/client.ts index db3e1c7d..83bcb30e 100644 --- a/src/core-rpc/client.ts +++ b/src/core-rpc/client.ts @@ -1,8 +1,9 @@ /* eslint-disable @typescript-eslint/no-non-null-assertion */ import fetch, { RequestInit } from 'node-fetch'; -import { parsePort, stopwatch, logError, timeout } from '../helpers'; +import { parsePort, stopwatch, timeout } from '../helpers'; import { CoreNodeFeeResponse } from '@stacks/stacks-blockchain-api-types'; import { ClarityValue, cvToHex } from '@stacks/transactions'; +import { logger } from '../logger'; interface CoreRpcAccountInfo { /** Hex-prefixed uint128. */ @@ -170,7 +171,7 @@ export class StacksCoreRpcClient { // eslint-disable-next-line @typescript-eslint/no-unsafe-return return resultJson; } catch (error) { - logError(`Error parsing json: "${resultString}"`, error); + logger.error(error, `Error parsing json: "${resultString}"`); throw error; } } @@ -191,7 +192,7 @@ export class StacksCoreRpcClient { const resultString = await result.text(); return resultString; } catch (error) { - logError(`Error reading response from ${url}`, error); + logger.error(error, `Error reading response from ${url}`); throw error; } } diff --git a/src/datastore/connection-legacy.ts b/src/datastore/connection-legacy.ts index 9a63552f..09037e86 100644 --- a/src/datastore/connection-legacy.ts +++ b/src/datastore/connection-legacy.ts @@ -1,7 +1,8 @@ import { Client, ClientConfig, Pool, PoolClient, PoolConfig } from 'pg'; -import { logError, logger, parseArgBoolean, parsePort, stopwatch, timeout } from '../helpers'; +import { parseArgBoolean, parsePort, stopwatch, timeout } from '../helpers'; import { PgServer } from './connection'; import { isPgConnectionError } from './helpers'; +import { logger } from '../logger'; export type PgClientConfig = ClientConfig & { schema?: string }; type PgPoolConfig = PoolConfig & { schema?: string }; @@ -36,13 +37,13 @@ export async function connectPgPool({ } catch (error: any) { const pgConnectionError = isPgConnectionError(error); if (!pgConnectionError) { - logError('Cannot connect to pg', error); + logger.error(error, 'Cannot connect to pg'); throw error; } const timeElapsed = initTimer.getElapsed(); if (timeElapsed - lastElapsedLog > 2000) { lastElapsedLog = timeElapsed; - logError('Pg connection failed, retrying..'); + logger.error('Pg connection failed, retrying..'); } connectionError = error; await timeout(100); @@ -61,7 +62,7 @@ export async function connectPgPool({ }); const pool = new Pool(poolConfig); pool.on('error', error => { - logger.error(`Postgres 
connection pool error: ${error.message}`, error); + logger.error(error, `Postgres connection pool error: ${error.message}`); }); return pool; } diff --git a/src/datastore/connection.ts b/src/datastore/connection.ts index 928be64e..f0daae4e 100644 --- a/src/datastore/connection.ts +++ b/src/datastore/connection.ts @@ -1,6 +1,7 @@ -import { logError, parseArgBoolean, parsePort, stopwatch, timeout } from '../helpers'; +import { parseArgBoolean, parsePort, stopwatch, timeout } from '../helpers'; import * as postgres from 'postgres'; import { isPgConnectionError } from './helpers'; +import { logger } from '../logger'; export type PgSqlClient = postgres.Sql | postgres.TransactionSql; @@ -92,12 +93,12 @@ export async function connectPostgres({ const timeElapsed = initTimer.getElapsed(); if (timeElapsed - lastElapsedLog > 2000) { lastElapsedLog = timeElapsed; - logError(`Pg connection failed: ${error}, retrying..`); + logger.error(error, 'Pg connection failed. Retrying..'); } connectionError = error; await timeout(100); } else { - logError('Cannot connect to pg', error); + logger.error(error, 'Cannot connect to pg'); throw error; } } finally { diff --git a/src/datastore/helpers.ts b/src/datastore/helpers.ts index 1c9217dd..085e8214 100644 --- a/src/datastore/helpers.ts +++ b/src/datastore/helpers.ts @@ -1,4 +1,4 @@ -import { hexToBuffer, logError, parseEnum, unwrapOptionalProp } from '../helpers'; +import { hexToBuffer, parseEnum, unwrapOptionalProp } from '../helpers'; import { BlockQueryResult, ContractTxQueryResult, @@ -62,6 +62,7 @@ import { NftEvent } from 'docs/generated'; import { getAssetEventTypeString } from '../api/controllers/db-controller'; import { PgStoreEventEmitter } from './pg-store-event-emitter'; import { Pox2EventName } from '../pox-helpers'; +import { logger } from '../logger'; export const TX_COLUMNS = [ 'tx_id', @@ -1171,7 +1172,7 @@ export function registerMempoolPromStats(pgEvents: PgStoreEventEmitter) { try { updatePromMempoolStats(mempoolStats); } catch (error) { - logError(`Error updating prometheus mempool stats`, error); + logger.error(error, 'Error updating prometheus mempool stats'); } }); }); diff --git a/src/datastore/migrations.ts b/src/datastore/migrations.ts index 931cc49f..9cc7f45e 100644 --- a/src/datastore/migrations.ts +++ b/src/datastore/migrations.ts @@ -1,10 +1,11 @@ import * as path from 'path'; import PgMigrate, { RunnerOption } from 'node-pg-migrate'; import { Client } from 'pg'; -import { APP_DIR, isDevEnv, isTestEnv, logError, logger, REPO_DIR } from '../helpers'; +import { APP_DIR, isDevEnv, isTestEnv, REPO_DIR } from '../helpers'; import { getPgClientConfig, PgClientConfig } from './connection-legacy'; import { connectPostgres, PgServer } from './connection'; import { databaseHasData } from './event-requests'; +import { logger } from '../logger'; const MIGRATIONS_TABLE = 'pgmigrations'; const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations'); @@ -44,7 +45,7 @@ export async function runMigrations( } await PgMigrate(runnerOpts); } catch (error) { - logError(`Error running pg-migrate`, error); + logger.error(error, 'Error running pg-migrate'); throw error; } finally { await client.end(); diff --git a/src/datastore/pg-notifier.ts b/src/datastore/pg-notifier.ts index 9d9a3116..28b2758c 100644 --- a/src/datastore/pg-notifier.ts +++ b/src/datastore/pg-notifier.ts @@ -1,5 +1,5 @@ import * as postgres from 'postgres'; -import { logError, logger } from '../helpers'; +import { logger } from '../logger'; import { DbConfigState } from './common'; import { 
connectPostgres, PgServer, PgSqlClient } from './connection'; @@ -93,7 +93,7 @@ export class PgNotifier { () => logger.info(`PgNotifier connected, listening on channel: ${this.pgChannelName}`) ); } catch (error) { - logError('PgNotifier fatal connection error', error); + logger.error(error, 'PgNotifier fatal connection error'); throw error; } } @@ -153,7 +153,7 @@ export class PgNotifier { await this.sql .notify(this.pgChannelName, JSON.stringify(notification)) .catch(error => - logError(`PgNotifier error sending notification of type: ${notification.type}`, error) + logger.error(error, `PgNotifier error sending notification of type: ${notification.type}`) ); } } diff --git a/src/datastore/pg-write-store.ts b/src/datastore/pg-write-store.ts index 738be7fc..57660079 100644 --- a/src/datastore/pg-write-store.ts +++ b/src/datastore/pg-write-store.ts @@ -1,12 +1,4 @@ -import { - logger, - logError, - getOrAdd, - batchIterate, - isProdEnv, - I32_MAX, - getIbdBlockHeight, -} from '../helpers'; +import { getOrAdd, batchIterate, isProdEnv, I32_MAX, getIbdBlockHeight } from '../helpers'; import { DbBlock, DbTx, @@ -100,6 +92,7 @@ import { isProcessableTokenMetadata } from '../token-metadata/helpers'; import * as zoneFileParser from 'zone-file'; import { parseResolver, parseZoneFileTxt } from '../event-stream/bns/bns-helpers'; import { Pox2EventName } from '../pox-helpers'; +import { logger } from '../logger'; class MicroblockGapError extends Error { constructor(message: string) { @@ -245,7 +238,7 @@ export class PgWriteStore extends PgStore { const candidateTxIds = data.txs.map(d => d.tx.tx_id); const removedTxsResult = await this.pruneMempoolTxs(sql, candidateTxIds); if (removedTxsResult.removedTxs.length > 0) { - logger.verbose( + logger.debug( `Removed ${removedTxsResult.removedTxs.length} txs from mempool table during new block ingestion` ); } @@ -360,7 +353,7 @@ export class PgWriteStore extends PgStore { if (isCanonical && data.pox_v1_unlock_height !== undefined) { // update the pox_state.pox_v1_unlock_height singleton await sql` - UPDATE pox_state + UPDATE pox_state SET pox_v1_unlock_height = ${data.pox_v1_unlock_height} WHERE pox_v1_unlock_height != ${data.pox_v1_unlock_height} `; @@ -416,9 +409,7 @@ export class PgWriteStore extends PgStore { } const mempoolGarbageResults = await this.deleteGarbageCollectedMempoolTxs(sql); if (mempoolGarbageResults.deletedTxs.length > 0) { - logger.verbose( - `Garbage collected ${mempoolGarbageResults.deletedTxs.length} mempool txs` - ); + logger.debug(`Garbage collected ${mempoolGarbageResults.deletedTxs.length} mempool txs`); } garbageCollectedMempoolTxs = mempoolGarbageResults.deletedTxs; @@ -728,7 +719,7 @@ export class PgWriteStore extends PgStore { const candidateTxIds = data.txs.map(d => d.tx.tx_id); const removedTxsResult = await this.pruneMempoolTxs(sql, candidateTxIds); if (removedTxsResult.removedTxs.length > 0) { - logger.verbose( + logger.debug( `Removed ${removedTxsResult.removedTxs.length} microblock-txs from mempool table during microblock ingestion` ); } @@ -777,7 +768,7 @@ export class PgWriteStore extends PgStore { UPDATE mempool_txs SET pruned = true FROM txs - WHERE + WHERE mempool_txs.tx_id = txs.tx_id AND mempool_txs.pruned = false AND txs.canonical = true AND @@ -799,7 +790,7 @@ export class PgWriteStore extends PgStore { const tablesUpdates: Record = {}; const txsResult = await sql` UPDATE txs - SET + SET canonical = true, block_height = 1, tx_index = tx_index + 1, @@ -822,7 +813,7 @@ export class PgWriteStore extends PgStore { 
const txIndexBump = table === 'smart_contracts' ? sql`` : sql`tx_index = tx_index + 1,`; const metadataResult = await sql` UPDATE ${sql(table)} - SET + SET canonical = true, ${heightCol} = 1, ${txIndexBump} @@ -1871,7 +1862,7 @@ export class PgWriteStore extends PgStore { const result = await sql` INSERT INTO ft_metadata ${sql(values)} ON CONFLICT (contract_id) - DO + DO UPDATE SET ${sql(values)} `; await sql` @@ -1903,7 +1894,7 @@ export class PgWriteStore extends PgStore { const result = await sql` INSERT INTO nft_metadata ${sql(values)} ON CONFLICT (contract_id) - DO + DO UPDATE SET ${sql(values)} `; await sql` @@ -1944,7 +1935,7 @@ export class PgWriteStore extends PgStore { throw new Error(`Expected ${lockedInfos.length} inserts, got ${res.count}`); } } catch (e: any) { - logError(`Locked Info errors ${e.message}`, e); + logger.error(e, `Locked Info errors ${e.message}`); throw e; } } @@ -2029,7 +2020,7 @@ export class PgWriteStore extends PgStore { INSERT INTO faucet_requests ${this.sql(values)} `; } catch (error) { - logError(`Error performing faucet request update: ${error}`, error); + logger.error(error, `Error performing faucet request update: ${error}`); throw error; } } @@ -2145,7 +2136,7 @@ export class PgWriteStore extends PgStore { .map(tx => tx.tx_id); const removedTxsResult = await this.pruneMempoolTxs(sql, txsToPrune); if (removedTxsResult.removedTxs.length > 0) { - logger.verbose( + logger.debug( `Removed ${removedTxsResult.removedTxs.length} txs from mempool table during micro-reorg handling` ); } @@ -2254,7 +2245,7 @@ export class PgWriteStore extends PgStore { return { restoredTxs: [] }; } for (const txId of txIds) { - logger.verbose(`Restoring mempool tx: ${txId}`); + logger.debug(`Restoring mempool tx: ${txId}`); } const updatedRows = await sql<{ tx_id: string }[]>` @@ -2266,7 +2257,7 @@ export class PgWriteStore extends PgStore { const updatedTxs = updatedRows.map(r => r.tx_id); for (const tx of updatedTxs) { - logger.verbose(`Updated mempool tx: ${tx}`); + logger.debug(`Updated mempool tx: ${tx}`); } let restoredTxs = updatedRows.map(r => r.tx_id); @@ -2275,12 +2266,10 @@ export class PgWriteStore extends PgStore { if (updatedRows.length < txIds.length) { const txsRequiringInsertion = txIds.filter(txId => !updatedTxs.includes(txId)); - logger.verbose( - `To restore mempool txs, ${txsRequiringInsertion.length} txs require insertion` - ); + logger.debug(`To restore mempool txs, ${txsRequiringInsertion.length} txs require insertion`); const txs: TxQueryResult[] = await sql` - SELECT DISTINCT ON(tx_id) ${sql(TX_COLUMNS)} + SELECT DISTINCT ON(tx_id) ${sql(TX_COLUMNS)} FROM txs WHERE tx_id IN ${sql(txsRequiringInsertion)} ORDER BY tx_id, block_height DESC, microblock_sequence DESC, tx_index DESC @@ -2297,7 +2286,7 @@ export class PgWriteStore extends PgStore { restoredTxs = [...restoredTxs, ...txsRequiringInsertion]; for (const tx of mempoolTxs) { - logger.verbose(`Inserted mempool tx: ${tx.tx_id}`); + logger.debug(`Inserted mempool tx: ${tx.tx_id}`); } } @@ -2315,7 +2304,7 @@ export class PgWriteStore extends PgStore { return { removedTxs: [] }; } for (const txId of txIds) { - logger.verbose(`Pruning mempool tx: ${txId}`); + logger.debug(`Pruning mempool tx: ${txId}`); } const updateResults = await sql<{ tx_id: string }[]>` UPDATE mempool_txs @@ -2375,7 +2364,7 @@ export class PgWriteStore extends PgStore { updatedEntities.markedNonCanonical.txs += txResult.length; } for (const txId of txIds) { - logger.verbose(`Marked tx as ${canonical ? 
'canonical' : 'non-canonical'}: ${txId.tx_id}`); + logger.debug(`Marked tx as ${canonical ? 'canonical' : 'non-canonical'}: ${txId.tx_id}`); } if (txIds.length) { await sql` @@ -2604,8 +2593,8 @@ export class PgWriteStore extends PgStore { updatedEntities.markedCanonical.microblocks += microblocksAccepted.size; updatedEntities.markedNonCanonical.microblocks += microblocksOrphaned.size; - microblocksOrphaned.forEach(mb => logger.verbose(`Marked microblock as non-canonical: ${mb}`)); - microblocksAccepted.forEach(mb => logger.verbose(`Marked microblock as canonical: ${mb}`)); + microblocksOrphaned.forEach(mb => logger.debug(`Marked microblock as non-canonical: ${mb}`)); + microblocksAccepted.forEach(mb => logger.debug(`Marked microblock as canonical: ${mb}`)); const markCanonicalResult = await this.markEntitiesCanonical( sql, @@ -2618,7 +2607,7 @@ export class PgWriteStore extends PgStore { markCanonicalResult.txsMarkedCanonical ); if (removedTxsResult.removedTxs.length > 0) { - logger.verbose( + logger.debug( `Removed ${removedTxsResult.removedTxs.length} txs from mempool table during reorg handling` ); } @@ -2775,9 +2764,9 @@ export class PgWriteStore extends PgStore { ], ]; const markedCanonical = updates.map(e => `${e[1]} ${e[0]}`).join(', '); - logger.verbose(`Entities marked as canonical: ${markedCanonical}`); + logger.debug(`Entities marked as canonical: ${markedCanonical}`); const markedNonCanonical = updates.map(e => `${e[2]} ${e[0]}`).join(', '); - logger.verbose(`Entities marked as non-canonical: ${markedNonCanonical}`); + logger.debug(`Entities marked as non-canonical: ${markedNonCanonical}`); } /** diff --git a/src/event-replay/event-replay.ts b/src/event-replay/event-replay.ts index 79cccd8e..53cb0775 100644 --- a/src/event-replay/event-replay.ts +++ b/src/event-replay/event-replay.ts @@ -8,9 +8,10 @@ import { import { cycleMigrations, dangerousDropAllTables } from '../datastore/migrations'; import { PgWriteStore } from '../datastore/pg-write-store'; import { startEventServer } from '../event-stream/event-server'; -import { getApiConfiguredChainID, HttpClientResponse, httpPostRequest, logger } from '../helpers'; +import { getApiConfiguredChainID, HttpClientResponse, httpPostRequest } from '../helpers'; import { importV1TokenOfferingData } from '../import-v1'; import { findTsvBlockHeight, getDbBlockHeight } from './helpers'; +import { logger } from '../logger'; enum EventImportMode { /** @@ -127,7 +128,6 @@ export async function importEventsFromTsv( chainId: getApiConfiguredChainID(), serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); await importV1TokenOfferingData(db); diff --git a/src/event-stream/event-server.ts b/src/event-stream/event-server.ts index a7445938..f8c3e341 100644 --- a/src/event-stream/event-server.ts +++ b/src/event-stream/event-server.ts @@ -5,9 +5,7 @@ import * as express from 'express'; import * as bodyParser from 'body-parser'; import { asyncHandler } from '../api/async-handler'; import PQueue from 'p-queue'; -import * as expressWinston from 'express-winston'; -import * as winston from 'winston'; -import { getIbdBlockHeight, hexToBuffer, logError, logger, LogLevel } from '../helpers'; +import { getIbdBlockHeight, hexToBuffer } from '../helpers'; import { CoreNodeBlockMessage, CoreNodeEventType, @@ -70,6 +68,7 @@ import { import { handleBnsImport } from '../import-v1'; import { Pox2ContractIdentifer } from '../pox-helpers'; import { decodePox2PrintEvent } from './pox2-event-parsing'; +import { logger, loggerMiddleware } from '../logger'; 
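For reference, a minimal hypothetical sketch of the conventions this migration applies at call sites, assuming only the `logger` and `loggerMiddleware` exports introduced in src/logger.ts (the route and handler below are illustrative, not part of the patch): the pino-http middleware replaces the express-winston request logger, `logger.verbose` calls become `logger.debug` since Pino has no `verbose` level, and the Error or context object is passed as the first argument with the message string second.

    // Hypothetical sketch of the new logging conventions; not part of the patch.
    import * as express from 'express';
    import { logger, loggerMiddleware } from '../logger';

    const app = express();
    // Request/response logging is handled by the pino-http middleware instead of express-winston.
    app.use(loggerMiddleware);

    app.post('/example_event', async (req, res) => {
      // Structured context goes in the first argument so Pino serializes it.
      logger.debug({ path: req.path }, 'received example event');
      try {
        await Promise.resolve(); // placeholder for real message handling
        res.status(200).json({ result: 'ok' });
      } catch (error) {
        // Error first, message second: the argument order used throughout this diff.
        logger.error(error, 'error processing example event');
        res.status(500).json({ error: String(error) });
      }
    });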
export const IBD_PRUNABLE_ROUTES = ['/new_mempool_tx', '/drop_mempool_tx', '/new_microblocks']; @@ -85,7 +84,7 @@ async function handleBurnBlockMessage( burnBlockMsg: CoreNodeBurnBlockMessage, db: PgWriteStore ): Promise { - logger.verbose( + logger.debug( `Received burn block message hash ${burnBlockMsg.burn_block_hash}, height: ${burnBlockMsg.burn_block_height}, reward recipients: ${burnBlockMsg.reward_recipients.length}` ); const rewards = burnBlockMsg.reward_recipients.map((r, index) => { @@ -123,7 +122,7 @@ async function handleBurnBlockMessage( } async function handleMempoolTxsMessage(rawTxs: string[], db: PgWriteStore): Promise { - logger.verbose(`Received ${rawTxs.length} mempool transactions`); + logger.debug(`Received ${rawTxs.length} mempool transactions`); // TODO: mempool-tx receipt date should be sent from the core-node const receiptDate = Math.round(Date.now() / 1000); const decodedTxs = rawTxs.map(str => { @@ -139,7 +138,7 @@ async function handleMempoolTxsMessage(rawTxs: string[], db: PgWriteStore): Prom }; }); const dbMempoolTxs = decodedTxs.map(tx => { - logger.verbose(`Received mempool tx: ${tx.txId}`); + logger.debug(`Received mempool tx: ${tx.txId}`); const dbMempoolTx = createDbMempoolTxFromCoreMsg({ txId: tx.txId, txData: tx.txData, @@ -157,7 +156,7 @@ async function handleDroppedMempoolTxsMessage( msg: CoreNodeDropMempoolTxMessage, db: PgWriteStore ): Promise { - logger.verbose(`Received ${msg.dropped_txids.length} dropped mempool txs`); + logger.debug(`Received ${msg.dropped_txids.length} dropped mempool txs`); const dbTxStatus = getTxDbStatus(msg.reason); await db.dropMempoolTxs({ status: dbTxStatus, txIds: msg.dropped_txids }); } @@ -167,7 +166,7 @@ async function handleMicroblockMessage( msg: CoreNodeMicroblockMessage, db: PgWriteStore ): Promise { - logger.verbose(`Received microblock with ${msg.transactions.length} txs`); + logger.debug(`Received microblock with ${msg.transactions.length} txs`); const dbMicroblocks = parseMicroblocksFromTxs({ parentIndexBlockHash: msg.parent_index_block_hash, txs: msg.transactions, @@ -202,7 +201,7 @@ async function handleMicroblockMessage( } }); parsedTxs.forEach(tx => { - logger.verbose(`Received microblock mined tx: ${tx.core_tx.txid}`); + logger.debug(`Received microblock mined tx: ${tx.core_tx.txid}`); }); const updateData: DataStoreMicroblockUpdateData = { microblocks: dbMicroblocks, @@ -255,7 +254,7 @@ async function handleBlockMessage( execution_cost_write_length: 0, }; - logger.verbose(`Received block ${msg.block_hash} (${msg.block_height}) from node`, dbBlock); + logger.debug(`Received block ${msg.block_hash} (${msg.block_height}) from node`, dbBlock); const dbMinerRewards: DbMinerReward[] = []; for (const minerReward of msg.matured_miner_rewards) { @@ -276,7 +275,7 @@ async function handleBlockMessage( dbMinerRewards.push(dbMinerReward); } - logger.verbose(`Received ${dbMinerRewards.length} matured miner rewards`); + logger.debug(`Received ${dbMinerRewards.length} matured miner rewards`); const dbMicroblocks = parseMicroblocksFromTxs({ parentIndexBlockHash: msg.parent_index_block_hash, @@ -301,7 +300,7 @@ async function handleBlockMessage( }); parsedTxs.forEach(tx => { - logger.verbose(`Received anchor block mined tx: ${tx.core_tx.txid}`); + logger.debug(`Received anchor block mined tx: ${tx.core_tx.txid}`); logger.info('Transaction confirmed', { txid: tx.core_tx.txid, in_microblock: tx.microblock_hash != '', @@ -377,7 +376,7 @@ function parseDataStoreTxEventData( for (const event of events) { if (!event.committed) 
{ - logger.verbose(`Ignoring uncommitted tx event from tx ${event.txid}`); + logger.debug(`Ignoring uncommitted tx event from tx ${event.txid}`); continue; } const dbTx = dbData.find(entry => entry.tx.tx_id === event.txid); @@ -393,11 +392,11 @@ function parseDataStoreTxEventData( } catch (e) { logger.warn(`Failed to decode contract log event: ${event.contract_event.raw_value}`); } - logger.verbose( + logger.debug( `Ignoring tx event from unsuccessful tx ${event.txid}, status: ${dbTx.tx.status}, repr: ${reprStr}` ); } else { - logger.verbose( + logger.debug( `Ignoring tx event from unsuccessful tx ${event.txid}, status: ${dbTx.tx.status}` ); } @@ -668,7 +667,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleRawEventRequest(eventPath, payload, db)) .catch(e => { - logError(`Error storing raw core node request data`, e, payload); + logger.error(e, 'Error storing raw core node request data'); throw e; }); }, @@ -676,7 +675,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleBlockMessage(chainId, msg, db)) .catch(e => { - logError(`Error processing core node block message`, e, msg); + logger.error(e, 'Error processing core node block message'); throw e; }); }, @@ -688,7 +687,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleMicroblockMessage(chainId, msg, db)) .catch(e => { - logError(`Error processing core node microblock message`, e, msg); + logger.error(e, 'Error processing core node microblock message'); throw e; }); }, @@ -696,7 +695,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleBurnBlockMessage(msg, db)) .catch(e => { - logError(`Error processing core node burn block message`, e, msg); + logger.error(e, 'Error processing core node burn block message'); throw e; }); }, @@ -704,7 +703,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleMempoolTxsMessage(rawTxs, db)) .catch(e => { - logError(`Error processing core node mempool message`, e, rawTxs); + logger.error(e, 'Error processing core node mempool message'); throw e; }); }, @@ -712,7 +711,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleDroppedMempoolTxsMessage(msg, db)) .catch(e => { - logError(`Error processing core node dropped mempool txs message`, e, msg); + logger.error(e, 'Error processing core node dropped mempool txs message'); throw e; }); }, @@ -720,7 +719,7 @@ function createMessageProcessorQueue(): EventMessageHandler { return processorQueue .add(() => handleNewAttachmentMessage(msg, db)) .catch(e => { - logError(`Error processing new attachment message`, e, msg); + logger.error(e, 'Error processing new attachment message'); throw e; }); }, @@ -742,7 +741,6 @@ export async function startEventServer(opts: { serverHost?: string; /** If not specified, this is read from the STACKS_CORE_EVENT_PORT env var. */ serverPort?: number; - httpLogLevel?: LogLevel; }): Promise { const db = opts.datastore; const messageHandler = opts.messageHandler ?? 
createMessageProcessorQueue(); @@ -768,7 +766,7 @@ export async function startEventServer(opts: { const handleRawEventRequest = asyncHandler(async req => { await messageHandler.handleRawEventRequest(req.path, req.body, db); - if (logger.isDebugEnabled()) { + if (logger.level === 'debug') { const eventPath = req.path; let payload = JSON.stringify(req.body); // Skip logging massive event payloads, this _should_ only exclude the genesis block payload which is ~80 MB. @@ -779,18 +777,7 @@ export async function startEventServer(opts: { } }); - app.use( - expressWinston.logger({ - format: logger.format, - transports: logger.transports, - metaField: (null as unknown) as string, - statusLevels: { - error: 'error', - warn: opts.httpLogLevel ?? 'http', - success: opts.httpLogLevel ?? 'http', - }, - }) - ); + app.use(loggerMiddleware); app.use(bodyParser.json({ type: 'application/json', limit: '500MB' })); @@ -831,7 +818,7 @@ export async function startEventServer(opts: { res.status(200).json({ result: 'ok' }); next(); } catch (error) { - logError(`error processing core-node /new_block: ${error}`, error); + logger.error(error, 'error processing core-node /new_block'); res.status(500).json({ error: error }); } }), @@ -847,7 +834,7 @@ export async function startEventServer(opts: { res.status(200).json({ result: 'ok' }); next(); } catch (error) { - logError(`error processing core-node /new_burn_block: ${error}`, error); + logger.error(error, 'error processing core-node /new_burn_block'); res.status(500).json({ error: error }); } }), @@ -863,7 +850,7 @@ export async function startEventServer(opts: { res.status(200).json({ result: 'ok' }); next(); } catch (error) { - logError(`error processing core-node /new_mempool_tx: ${error}`, error); + logger.error(error, 'error processing core-node /new_mempool_tx'); res.status(500).json({ error: error }); } }), @@ -879,7 +866,7 @@ export async function startEventServer(opts: { res.status(200).json({ result: 'ok' }); next(); } catch (error) { - logError(`error processing core-node /drop_mempool_tx: ${error}`, error); + logger.error(error, 'error processing core-node /drop_mempool_tx'); res.status(500).json({ error: error }); } }), @@ -895,7 +882,7 @@ export async function startEventServer(opts: { res.status(200).json({ result: 'ok' }); next(); } catch (error) { - logError(`error processing core-node /attachments/new: ${error}`, error); + logger.error(error, 'error processing core-node /attachments/new'); res.status(500).json({ error: error }); } }), @@ -911,7 +898,7 @@ export async function startEventServer(opts: { res.status(200).json({ result: 'ok' }); next(); } catch (error) { - logError(`error processing core-node /new_microblocks: ${error}`, error); + logger.error(error, 'error processing core-node /new_microblocks'); res.status(500).json({ error: error }); } }), @@ -920,18 +907,10 @@ export async function startEventServer(opts: { app.post('*', (req, res, next) => { res.status(404).json({ error: `no route handler for ${req.path}` }); - logError(`Unexpected event on path ${req.path}`); + logger.error(`Unexpected event on path ${req.path}`); next(); }); - app.use( - expressWinston.errorLogger({ - winstonInstance: logger as winston.Logger, - metaField: (null as unknown) as string, - blacklistedMetaFields: ['trace', 'os', 'process'], - }) - ); - const server = createServer(app); await new Promise((resolve, reject) => { server.once('error', error => { diff --git a/src/event-stream/pox2-event-parsing.ts b/src/event-stream/pox2-event-parsing.ts index 
2b8bdf0d..653284d6 100644 --- a/src/event-stream/pox2-event-parsing.ts +++ b/src/event-stream/pox2-event-parsing.ts @@ -13,7 +13,7 @@ import { DbPox2StackIncreaseEvent, DbPox2StackStxEvent, } from '../datastore/common'; -import { bufferToHexPrefixString, coerceToBuffer, has0xPrefix, logger } from '../helpers'; +import { bufferToHexPrefixString, coerceToBuffer, has0xPrefix } from '../helpers'; import { ClarityTypeID, @@ -32,6 +32,7 @@ import { } from 'stacks-encoding-native-js'; import { poxAddressToBtcAddress } from '@stacks/stacking'; import { Pox2EventName } from '../pox-helpers'; +import { logger } from '../logger'; function tryClarityPoxAddressToBtcAddress( poxAddr: Pox2Addr | ClarityValueOptionalSome | ClarityValueOptionalNone, @@ -54,7 +55,7 @@ function tryClarityPoxAddressToBtcAddress( network ); } catch (e) { - logger.verbose( + logger.debug( `Error encoding PoX address version: ${poxAddr.data.version.buffer}, hashbytes: ${poxAddr.data.hashbytes.buffer} to bitcoin address: ${e}` ); btcAddr = null; diff --git a/src/event-stream/reader.ts b/src/event-stream/reader.ts index 72cce745..3e8f2eb7 100644 --- a/src/event-stream/reader.ts +++ b/src/event-stream/reader.ts @@ -42,8 +42,6 @@ import { import { NotImplementedError } from '../errors'; import { getEnumDescription, - logger, - logError, I32_MAX, bufferToHexPrefixString, hexToBuffer, @@ -72,6 +70,7 @@ import { c32ToB58 } from 'c32check'; import { decodePox2PrintEvent } from './pox2-event-parsing'; import { Pox2ContractIdentifer, Pox2EventName } from '../pox-helpers'; import { principalCV } from '@stacks/transactions/dist/clarity/types/principalCV'; +import { logger } from '../logger'; export function getTxSenderAddress(tx: DecodedTxResult): string { const txSender = tx.auth.origin_condition.signer.address; @@ -715,7 +714,7 @@ export function parseMessageTransaction( ); txSender = getTxSenderAddress(rawTx); } else { - logError( + logger.error( `BTC transaction found, but no STX transfer event available to recreate transaction. TX: ${JSON.stringify( coreTx )}, event: ${JSON.stringify(events)}` @@ -752,24 +751,24 @@ export function parseMessageTransaction( } case TxPayloadTypeID.CoinbaseToAltRecipient: { if (payload.recipient.type_id === PrincipalTypeID.Standard) { - logger.verbose( + logger.debug( `Coinbase to alt recipient, standard principal: ${payload.recipient.address}` ); } else { - logger.verbose( + logger.debug( `Coinbase to alt recipient, contract principal: ${payload.recipient.address}.${payload.recipient.contract_name}` ); } break; } case TxPayloadTypeID.SmartContract: { - logger.verbose( + logger.debug( `Smart contract deployed: ${parsedTx.sender_address}.${payload.contract_name}` ); break; } case TxPayloadTypeID.ContractCall: { - logger.verbose( + logger.debug( `Contract call: ${payload.address}.${payload.contract_name}.${payload.function_name}` ); break; @@ -779,19 +778,19 @@ export function parseMessageTransaction( if (payload.recipient.type_id === PrincipalTypeID.Contract) { recipientPrincipal += '.' 
+ payload.recipient.contract_name; } - logger.verbose( + logger.debug( `Token transfer: ${payload.amount} from ${parsedTx.sender_address} to ${recipientPrincipal}` ); break; } case TxPayloadTypeID.PoisonMicroblock: { - logger.verbose( + logger.debug( `Poison microblock: header1 ${payload.microblock_header_1}), header2: ${payload.microblock_header_2}` ); break; } case TxPayloadTypeID.VersionedSmartContract: { - logger.verbose( + logger.debug( `Versioned smart contract deployed: Clarity version ${payload.clarity_version}, ${parsedTx.sender_address}.${payload.contract_name}` ); break; @@ -807,7 +806,7 @@ export function parseMessageTransaction( } return parsedTx; } catch (error) { - logError(`error parsing message transaction ${JSON.stringify(coreTx)}: ${error}`, error); + logger.error(error, `error parsing message transaction ${JSON.stringify(coreTx)}`); throw error; } } diff --git a/src/helpers.ts b/src/helpers.ts index 0991374c..834c25fa 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -18,15 +18,10 @@ import { isValidStacksAddress, stacksToBitcoinAddress } from 'stacks-encoding-na import * as stream from 'stream'; import * as ecc from 'tiny-secp256k1'; import * as util from 'util'; -import * as winston from 'winston'; -import { - CliConfigSetColors, - NpmConfigSetLevels, - SyslogConfigSetLevels, -} from 'winston/lib/winston/config'; import { StacksCoreRpcClient } from './core-rpc/client'; import { DbEventTypeId } from './datastore/common'; import { createHash } from 'node:crypto'; +import { logger } from './logger'; export const isDevEnv = process.env.NODE_ENV === 'development'; export const isTestEnv = process.env.NODE_ENV === 'test'; @@ -148,79 +143,12 @@ export function loadDotEnv(): void { } const dotenvConfig = dotenv.config({ silent: true }); if (dotenvConfig.error) { - logError(`Error loading .env file: ${dotenvConfig.error}`, dotenvConfig.error); + logger.error(dotenvConfig.error, 'Error loading .env file'); throw dotenvConfig.error; } didLoadDotEnv = true; } -type EqualsTest = () => A extends T ? 1 : 0; -type Equals = EqualsTest extends EqualsTest ? 1 : 0; -type Filter = Equals extends 1 ? 
never : K; -type OmitIndex = { - [K in keyof T as Filter]: T[K]; -}; -type KnownKeys = keyof OmitIndex, string>; - -export type LogLevel = KnownKeys; -type DisabledLogLevels = Exclude< - KnownKeys | KnownKeys, - LogLevel ->; -type LoggerInterface = Omit & { level: LogLevel }; - -const LOG_LEVELS: LogLevel[] = ['error', 'warn', 'info', 'http', 'verbose', 'debug', 'silly']; -const defaultLogLevel: LogLevel = (() => { - const STACKS_API_LOG_LEVEL_ENV_VAR = 'STACKS_API_LOG_LEVEL'; - const logLevelEnvVar = process.env[ - STACKS_API_LOG_LEVEL_ENV_VAR - ]?.toLowerCase().trim() as LogLevel; - if (logLevelEnvVar) { - if (LOG_LEVELS.includes(logLevelEnvVar)) { - return logLevelEnvVar; - } - throw new Error( - `Invalid ${STACKS_API_LOG_LEVEL_ENV_VAR}, should be one of ${LOG_LEVELS.join(',')}` - ); - } - if (isDevEnv) { - return 'debug'; - } - return 'info'; -})(); - -export const logger = winston.createLogger({ - level: defaultLogLevel, - exitOnError: false, - defaultMeta: { component: 'core-api' }, - format: winston.format.combine( - winston.format.metadata(), - winston.format.timestamp(), - winston.format.json(), - winston.format.errors({ stack: true }) - ), - transports: [ - new winston.transports.Console({ - handleExceptions: true, - }), - ], -}) as LoggerInterface; - -export function logError(message: string, ...errorData: any[]) { - if (isDevEnv) { - console.error(message); - if (errorData?.length > 0) { - errorData.forEach(e => console.error(e)); - } - } else { - if (errorData?.length > 0) { - logger.error(message, ...errorData); - } else { - logger.error(message); - } - } -} - export function formatMapToObject( map: Map, formatter: (value: TValue) => TFormatted @@ -1111,7 +1039,7 @@ export async function getStacksNodeChainID(): Promise { export function getApiConfiguredChainID() { if (!('STACKS_CHAIN_ID' in process.env)) { const error = new Error(`Env var STACKS_CHAIN_ID is not set`); - logError(error.message, error); + logger.error(error, error.message); throw error; } const configuredChainID: ChainID = parseInt(process.env['STACKS_CHAIN_ID'] as string); diff --git a/src/import-v1/index.ts b/src/import-v1/index.ts index 157687d6..7acb01d3 100644 --- a/src/import-v1/index.ts +++ b/src/import-v1/index.ts @@ -14,17 +14,11 @@ import { DbConfigState, DbTokenOfferingLocked, } from '../datastore/common'; -import { - asyncBatchIterate, - asyncIterableToGenerator, - I32_MAX, - logError, - logger, - REPO_DIR, -} from '../helpers'; +import { asyncBatchIterate, asyncIterableToGenerator, I32_MAX, REPO_DIR } from '../helpers'; import { BnsGenesisBlock, getBnsGenesisBlockFromBlockMessage } from '../event-replay/helpers'; import { PgSqlClient } from '../datastore/connection'; import { PgWriteStore } from '../datastore/pg-write-store'; +import { logger } from '../logger'; const finished = util.promisify(stream.finished); const pipeline = util.promisify(stream.pipeline); @@ -168,7 +162,7 @@ class ChainProcessor extends stream.Writable { await this.db.updateNames(this.sql, this.genesisBlock, obj); this.rowCount += 1; if (obj.zonefile === '') { - logger.verbose( + logger.debug( `${this.tag}: [non-critical] no zonefile for ${obj.name} hash ${obj.zonefile_hash}` ); } @@ -276,7 +270,7 @@ async function readZones(zfname: string): Promise> { const hashes = new Map(); const zstream = stream.pipeline(fs.createReadStream(zfname), new LineReaderStream(), err => { - if (err) logError(`readzones: ${err}`); + if (err) logger.error(err, `readzones: ${err}`); }); const generator = asyncIterableToGenerator(zstream); @@ -304,7 
+298,7 @@ async function valid(fileName: string): Promise { await pipeline(fs.createReadStream(fileName), hash); const calchash = hash.digest('hex'); if (expected !== calchash) { - logError(`calculated ${calchash} for ${fileName} != ${expected}`); + logger.error(`calculated ${calchash} for ${fileName} != ${expected}`); return false; } return true; @@ -404,7 +398,7 @@ async function validateBnsImportDir(importDir: string, importFiles: string[]) { throw new Error(`${importDir} is not a directory`); } } catch (error) { - logError(`Cannot import from ${importDir}`, error); + logger.error(error, `Cannot import from ${importDir}`); throw error; } @@ -413,7 +407,7 @@ async function validateBnsImportDir(importDir: string, importFiles: string[]) { for (const fname of importFiles) { if (!(await valid(path.join(importDir, fname)))) { const errMsg = `Cannot read import file due to sha256 mismatch: ${fname}`; - logError(errMsg); + logger.error(errMsg); throw new Error(errMsg); } } @@ -426,7 +420,7 @@ export async function importV1BnsNames( ) { const configState = await db.getConfigState(); if (configState.bns_names_onchain_imported) { - logger.verbose('Stacks 1.0 BNS names are already imported'); + logger.debug('Stacks 1.0 BNS names are already imported'); return; } await validateBnsImportDir(importDir, ['chainstate.txt', 'name_zonefiles.txt']); @@ -454,7 +448,7 @@ export async function importV1BnsSubdomains( ) { const configState = await db.getConfigState(); if (configState.bns_subdomains_imported) { - logger.verbose('Stacks 1.0 BNS subdomains are already imported'); + logger.debug('Stacks 1.0 BNS subdomains are already imported'); return; } await validateBnsImportDir(importDir, ['subdomains.csv', 'subdomain_zonefiles.txt']); @@ -502,7 +496,7 @@ class Sha256PassThrough extends stream.PassThrough { export async function importV1TokenOfferingData(db: PgWriteStore) { const configState = await db.getConfigState(); if (configState.token_offering_imported) { - logger.verbose('Stacks 1.0 token offering data is already imported'); + logger.debug('Stacks 1.0 token offering data is already imported'); return; } diff --git a/src/index.ts b/src/index.ts index 9727cc6c..38eeee7d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,8 +1,6 @@ import { loadDotEnv, timeout, - logger, - logError, isProdEnv, numberToHex, parseArgBoolean, @@ -27,6 +25,7 @@ import { PgWriteStore } from './datastore/pg-write-store'; import { isFtMetadataEnabled, isNftMetadataEnabled } from './token-metadata/helpers'; import { TokensProcessorQueue } from './token-metadata/tokens-processor-queue'; import { registerMempoolPromStats } from './datastore/helpers'; +import { logger } from './logger'; enum StacksApiMode { /** @@ -97,7 +96,7 @@ async function monitorCoreRpcConnection(): Promise { previouslyConnected = true; } catch (error) { previouslyConnected = false; - logger.error(`Warning: failed to connect to node RPC server at ${client.endpoint}`, error); + logger.error(error, `Warning: failed to connect to node RPC server at ${client.endpoint}`); } await timeout(CORE_RPC_HEARTBEAT_INTERVAL); } @@ -146,11 +145,11 @@ async function init(): Promise { const error = new Error( `The configured STACKS_CHAIN_ID does not match, configured: ${chainIdConfig}, stacks-node: ${chainIdNode}` ); - logError(error.message, error); + logger.error(error, error.message); throw error; } monitorCoreRpcConnection().catch(error => { - logger.error(`Error monitoring RPC connection: ${error}`, error); + logger.error(error, 'Error monitoring RPC connection'); }); if 
(!isFtMetadataEnabled()) { @@ -239,7 +238,7 @@ function initApp() { logger.info('App initialized'); }) .catch(error => { - logError(`app failed to start: ${error}`, error); + logger.error(error, 'app failed to start'); process.exit(1); }); } diff --git a/src/inspector-util.ts b/src/inspector-util.ts index 69663c88..eb41fa1c 100644 --- a/src/inspector-util.ts +++ b/src/inspector-util.ts @@ -4,20 +4,13 @@ import { once } from 'events'; import { createServer, Server } from 'http'; import * as express from 'express'; import { asyncHandler } from './api/async-handler'; -import { - logError, - logger, - parsePort, - stopwatch, - timeout, - pipelineAsync, - Stopwatch, -} from './helpers'; +import { parsePort, stopwatch, timeout, pipelineAsync, Stopwatch } from './helpers'; import { Socket } from 'net'; import * as os from 'os'; import * as path from 'path'; import * as fs from 'fs'; import { createProfiler, startProfiler, stopProfiler } from 'stacks-encoding-native-js'; +import { logger } from './logger'; type CpuProfileResult = inspector.Profiler.Profile; @@ -55,7 +48,7 @@ function initCpuProfiling(samplingInterval?: number): ProfilerInstance { if (error) { - logError(`[CpuProfiler] Error enabling profiling: ${error}`, error); + logger.error(error, '[CpuProfiler] Error enabling profiling'); reject(error); } else { logger.info(`[CpuProfiler] Profiling enabled`); @@ -63,7 +56,7 @@ function initCpuProfiling(samplingInterval?: number): ProfilerInstance { if (error) { - logError(`[CpuProfiler] Error setting sampling interval: ${error}`, error); + logger.error(error, '[CpuProfiler] Error setting sampling interval'); reject(error); } else { logger.info(`[CpuProfiler] Set sampling interval`); @@ -83,7 +76,7 @@ function initCpuProfiling(samplingInterval?: number): ProfilerInstance { if (error) { - logError(`[CpuProfiler] Error starting profiling: ${error}`, error); + logger.error(error, '[CpuProfiler] Error starting profiling'); reject(error); } else { sessionStopwatch.restart(); @@ -104,7 +97,7 @@ function initCpuProfiling(samplingInterval?: number): ProfilerInstance { if (error) { - logError(`[CpuProfiler] Error stopping profiling: ${error}`, error); + logger.error(error, '[CpuProfiler] Error stopping profiling'); reject(error); } else { logger.info(`[CpuProfiler] Profiling stopped`); @@ -146,7 +139,7 @@ function initCpuProfiling(samplingInterval?: number): ProfilerInstance { if (error) { - logError(`[HeapProfiler] Error enabling profiling: ${error}`, error); + logger.error(error, '[HeapProfiler] Error enabling profiling'); reject(error); } else { sw.restart(); @@ -201,7 +194,7 @@ function initHeapSnapshot( } }); } catch (error) { - logError(`[HeapProfiler] Error enabling profiling: ${error}`, error); + logger.error(error, '[HeapProfiler] Error enabling profiling'); reject(error); } }); @@ -217,8 +210,8 @@ function initHeapSnapshot( outputStream.write(message.params.chunk, error => { if (error) { logger.error( - `[HeapProfiler] Error writing heap profile chunk to output stream: ${error.message}`, - error + error, + `[HeapProfiler] Error writing heap profile chunk to output stream: ${error.message}` ); } }); @@ -231,7 +224,7 @@ function initHeapSnapshot( try { session.post('HeapProfiler.takeHeapSnapshot', undefined, (error: Error | null) => { if (error) { - logError(`[HeapProfiler] Error taking snapshot: ${error}`, error); + logger.error(error, '[HeapProfiler] Error taking snapshot'); reject(error); } else { logger.info( @@ -241,7 +234,7 @@ function initHeapSnapshot( } }); } catch (error) { - 
logError(`[HeapProfiler] Error taking snapshot: ${error}`, error); + logger.error(error, '[HeapProfiler] Error taking snapshot'); reject(error); } }); @@ -266,7 +259,7 @@ function initHeapSnapshot( logger.info(`[HeapProfiler] Profiler already disconnected`); resolve(); } else if (error) { - logError(`[HeapProfiler] Error disabling profiling: ${error}`, error); + logger.error(error, '[HeapProfiler] Error disabling profiling'); reject(error); } else { logger.info(`[HeapProfiler] Profiling disabled`); diff --git a/src/logger.ts b/src/logger.ts new file mode 100644 index 00000000..d77d5699 --- /dev/null +++ b/src/logger.ts @@ -0,0 +1,29 @@ +import pino from 'pino'; +import pinoHttp from 'pino-http'; + +// Common logging configuration +const loggingConfiguration = { + name: 'stacks-blockchain-api', + level: process.env.STACKS_API_LOG_LEVEL || 'info', + timestamp: pino.stdTimeFunctions.isoTime, + formatters: { + level: (label: string, number: number) => ({ level: label }), + }, + mixin: function () { + return { component: 'core-api' }; + }, + customLogLevel: function (_req: any, res: any, err: any) { + if (res.statusCode >= 400 && res.statusCode < 500) { + return 'warn'; + } else if (res.statusCode >= 500 || err) { + return 'error'; + } + return 'info'; + }, + autoLogging: false, +}; + +// ad-hoc logger +export const logger = pino(loggingConfiguration); +// logger middleware used by the web application framework +export const loggerMiddleware = pinoHttp(loggingConfiguration); diff --git a/src/rosetta-helpers.ts b/src/rosetta-helpers.ts index fb9c5f9d..a348aa09 100644 --- a/src/rosetta-helpers.ts +++ b/src/rosetta-helpers.ts @@ -55,7 +55,7 @@ import { StxUnlockEvent, } from './datastore/common'; import { getTxSenderAddress, getTxSponsorAddress } from './event-stream/reader'; -import { unwrapOptional, hexToBuffer, logger, getSendManyContract } from './helpers'; +import { unwrapOptional, hexToBuffer, getSendManyContract } from './helpers'; import { getCoreNodeEndpoint } from './core-rpc/client'; import { TokenMetadataErrorMode } from './token-metadata/tokens-contract-handler'; @@ -80,6 +80,7 @@ import { PgStore } from './datastore/pg-store'; import { isFtMetadataEnabled, tokenMetadataErrorMode } from './token-metadata/helpers'; import { poxAddressToBtcAddress } from '@stacks/stacking'; import { parseRecoverableSignatureVrs } from '@stacks/common'; +import { logger } from './logger'; enum CoinAction { CoinSpent = 'coin_spent', diff --git a/src/shutdown-handler.ts b/src/shutdown-handler.ts index 391beb06..58e1a847 100644 --- a/src/shutdown-handler.ts +++ b/src/shutdown-handler.ts @@ -1,4 +1,5 @@ -import { logError, logger, resolveOrTimeout } from './helpers'; +import { resolveOrTimeout } from './helpers'; +import { logger } from './logger'; const SHUTDOWN_SIGNALS = ['SIGINT', 'SIGTERM'] as const; @@ -29,7 +30,7 @@ async function startShutdown() { timeoutMs, !config.forceKillable, () => - logError( + logger.error( `${config.name} is taking longer than expected to shutdown, possibly hanging indefinitely` ) ); @@ -37,7 +38,7 @@ async function startShutdown() { if (config.forceKillable && config.forceKillHandler) { await Promise.resolve(config.forceKillHandler()); } - logError( + logger.error( `${config.name} was force killed after taking longer than ${timeoutMs}ms to shutdown` ); } else { @@ -45,7 +46,7 @@ async function startShutdown() { } } catch (error) { errorEncountered = true; - logError(`Error running ${config.name} shutdown handler`, error); + logger.error(error, `Error running ${config.name}
shutdown handler`); } } if (errorEncountered) { @@ -70,12 +71,12 @@ function registerShutdownSignals() { }); }); process.once('unhandledRejection', error => { - logError(`unhandledRejection ${(error as any)?.message ?? error}`, error as Error); + logger.error(error, 'unhandledRejection'); logger.error(`Shutting down... received unhandledRejection.`); void startShutdown(); }); process.once('uncaughtException', error => { - logError(`Received uncaughtException: ${error}`, error); + logger.error(error, 'Received uncaughtException'); logger.error(`Shutting down... received uncaughtException.`); void startShutdown(); }); diff --git a/src/tests-bns/api.ts b/src/tests-bns/api.ts index 1d5b5588..b5816871 100644 --- a/src/tests-bns/api.ts +++ b/src/tests-bns/api.ts @@ -54,7 +54,7 @@ describe('BNS API tests', () => { await cycleMigrations(); db = await PgWriteStore.connect({ usageName: 'tests' }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); const block = new TestBlockBuilder({ block_hash: '0xff', diff --git a/src/tests-bns/bns-integration-tests.ts b/src/tests-bns/bns-integration-tests.ts index 7dcd4c2c..a3d008da 100644 --- a/src/tests-bns/bns-integration-tests.ts +++ b/src/tests-bns/bns-integration-tests.ts @@ -17,7 +17,7 @@ import { SignedContractCallOptions, noneCV, } from '@stacks/transactions'; -import { logger } from '../helpers'; +import { logger } from '../logger'; import { testnetKeys } from '../api/routes/debug'; import { TestBlockBuilder } from '../test-utils/test-builders'; import { PgWriteStore } from '../datastore/pg-write-store'; @@ -336,8 +336,8 @@ describe('BNS integration tests', () => { process.env.PG_DATABASE = 'postgres'; await cycleMigrations(); db = await PgWriteStore.connect({ usageName: 'tests', skipMigrations: true }); - eventServer = await startEventServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + eventServer = await startEventServer({ datastore: db, chainId: ChainID.Testnet }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); const block = new TestBlockBuilder().build(); await db.update(block); @@ -606,4 +606,3 @@ describe('BNS integration tests', () => { await runMigrations(undefined, 'down'); }); }); - diff --git a/src/tests-bns/event-server-tests.ts b/src/tests-bns/event-server-tests.ts index 3f64f93a..d0170337 100644 --- a/src/tests-bns/event-server-tests.ts +++ b/src/tests-bns/event-server-tests.ts @@ -23,8 +23,7 @@ describe('BNS event server tests', () => { datastore: db, chainId: ChainID.Mainnet, serverHost: '127.0.0.1', - serverPort: 0, - httpLogLevel: 'debug', + serverPort: 0 }); }); diff --git a/src/tests-bns/v1-import-tests.ts b/src/tests-bns/v1-import-tests.ts index e1bbb921..f9a0011e 100644 --- a/src/tests-bns/v1-import-tests.ts +++ b/src/tests-bns/v1-import-tests.ts @@ -21,8 +21,8 @@ describe('BNS V1 import', () => { process.env.PG_DATABASE = 'postgres'; await cycleMigrations(); db = await PgWriteStore.connect({ usageName: 'tests' }); - eventServer = await startEventServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + eventServer = await startEventServer({ datastore: db, chainId: ChainID.Testnet }); + api = await
startApiServer({ datastore: db, chainId: ChainID.Testnet }); block = new TestBlockBuilder().addTx().build(); await db.update(block); diff --git a/src/tests-btc-faucet/faucet-btc-tests.ts b/src/tests-btc-faucet/faucet-btc-tests.ts index 2c3e51c2..78bc9f2a 100644 --- a/src/tests-btc-faucet/faucet-btc-tests.ts +++ b/src/tests-btc-faucet/faucet-btc-tests.ts @@ -120,7 +120,6 @@ describe('btc faucet', () => { datastore: db, writeDatastore: writeDb, chainId: ChainID.Testnet, - httpLogLevel: 'silly', }); }); diff --git a/src/tests-event-replay/import-export-tests.ts b/src/tests-event-replay/import-export-tests.ts index ea573793..ee6bd96e 100644 --- a/src/tests-event-replay/import-export-tests.ts +++ b/src/tests-event-replay/import-export-tests.ts @@ -171,7 +171,6 @@ describe('IBD', () => { chainId: ChainID.Mainnet, serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); return [eventServer, eventServer.closeAsync] as const; }, diff --git a/src/tests-event-replay/raw-event-request-tests.ts b/src/tests-event-replay/raw-event-request-tests.ts index 0dedaf8e..dfa07f34 100644 --- a/src/tests-event-replay/raw-event-request-tests.ts +++ b/src/tests-event-replay/raw-event-request-tests.ts @@ -24,7 +24,6 @@ describe('Events table', () => { chainId: ChainID.Mainnet, serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); }); @@ -50,7 +49,6 @@ describe('Events table', () => { chainId: ChainID.Mainnet, serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); return [eventServer, eventServer.closeAsync] as const; }, diff --git a/src/tests-rosetta/account-tests.ts b/src/tests-rosetta/account-tests.ts index b3fee4fd..f6196b1f 100644 --- a/src/tests-rosetta/account-tests.ts +++ b/src/tests-rosetta/account-tests.ts @@ -15,7 +15,7 @@ describe('/account tests', () => { process.env.PG_DATABASE = 'postgres'; await cycleMigrations(); db = await PgWriteStore.connect({ usageName: 'tests' }); - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('/account/balance - returns ft balances', async () => { @@ -83,7 +83,7 @@ describe('/account tests', () => { currency: { decimals: 6, symbol: 'STX' - }, + }, value: '1200' }, { diff --git a/src/tests-rosetta/block-tests.ts b/src/tests-rosetta/block-tests.ts index 1260408c..98e8b150 100644 --- a/src/tests-rosetta/block-tests.ts +++ b/src/tests-rosetta/block-tests.ts @@ -17,7 +17,7 @@ describe('/block tests', () => { process.env.PG_DATABASE = 'postgres'; await cycleMigrations(); db = await PgWriteStore.connect({ usageName: 'tests' }); - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('block/transaction - contract_call contains parsed metadata', async () => { diff --git a/src/tests-tokens/strict-mode-tests.ts b/src/tests-tokens/strict-mode-tests.ts index a979b501..d6f84673 100644 --- a/src/tests-tokens/strict-mode-tests.ts +++ b/src/tests-tokens/strict-mode-tests.ts @@ -115,7 +115,7 @@ describe('token metadata strict mode', () => { process.env.PG_DATABASE = 'postgres'; await cycleMigrations(); db = await PgWriteStore.connect({ usageName: 'tests', withNotifier: false }); - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); process.env['STACKS_API_ENABLE_FT_METADATA'] = '1'; 
process.env['STACKS_API_ENABLE_NFT_METADATA'] = '1'; diff --git a/src/tests-tokens/tokens-metadata-tests.ts b/src/tests-tokens/tokens-metadata-tests.ts index d4ccf8da..04cd46bc 100644 --- a/src/tests-tokens/tokens-metadata-tests.ts +++ b/src/tests-tokens/tokens-metadata-tests.ts @@ -18,13 +18,14 @@ import * as fs from 'fs'; import { EventStreamServer, startEventServer } from '../event-stream/event-server'; import { getStacksTestnetNetwork } from '../rosetta-helpers'; import { StacksCoreRpcClient } from '../core-rpc/client'; -import { logger, timeout, waiter, Waiter } from '../helpers'; +import { timeout, waiter, Waiter } from '../helpers'; import * as nock from 'nock'; import { PgWriteStore } from '../datastore/pg-write-store'; import { cycleMigrations, runMigrations } from '../datastore/migrations'; import { TokensProcessorQueue } from '../token-metadata/tokens-processor-queue'; import { performFetch } from '../token-metadata/helpers'; import { getPagingQueryLimit, ResourceType } from '../api/pagination'; +import { logger } from '../logger'; const pKey = 'cb3df38053d132895220b9ce471f6b676db5b9bf0b4adefb55f2118ece2478df01'; const stacksNetwork = getStacksTestnetNetwork(); @@ -77,7 +78,7 @@ describe('api tests', () => { const submitResult = await new StacksCoreRpcClient().sendTransaction(serializedTx); return submitResult; } catch (error) { - logger.error('error: ', error); + logger.error(error); } return Promise.resolve({ txId: '' }); } @@ -247,7 +248,7 @@ describe('api tests', () => { 'src/tests-tokens/test-contracts/nft-trait.clar' ); const tx = await standByForTx(contract.txId); - if (tx.status != 1) logger.error('contract deploy error', tx); + if (tx.status != 1) logger.error(tx, 'contract deploy error'); const contract1 = await deployContract( 'beeple', @@ -289,7 +290,7 @@ describe('api tests', () => { ); const tx = await standByForTx(contract.txId); - if (tx.status != 1) logger.error('contract deploy error', tx); + if (tx.status != 1) logger.error(tx, 'contract deploy error'); const contract1 = await deployContract( 'hey-token', diff --git a/src/tests/address-tests.ts b/src/tests/address-tests.ts index 3b4dda74..c92e9281 100644 --- a/src/tests/address-tests.ts +++ b/src/tests/address-tests.ts @@ -56,7 +56,7 @@ describe('address tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('address transaction transfers', async () => { diff --git a/src/tests/block-tests.ts b/src/tests/block-tests.ts index 38927080..29c7d20f 100644 --- a/src/tests/block-tests.ts +++ b/src/tests/block-tests.ts @@ -29,7 +29,7 @@ describe('block tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('info block time', async () => { diff --git a/src/tests/burnchain-tests.ts b/src/tests/burnchain-tests.ts index af033298..b0a3fc9e 100644 --- a/src/tests/burnchain-tests.ts +++ b/src/tests/burnchain-tests.ts @@ -20,7 +20,7 @@ describe('burnchain tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('fetch reward slot holders', async () => { diff --git 
a/src/tests/cache-control-tests.ts b/src/tests/cache-control-tests.ts index 36204e00..675055bb 100644 --- a/src/tests/cache-control-tests.ts +++ b/src/tests/cache-control-tests.ts @@ -25,7 +25,7 @@ describe('cache-control tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('parse if-none-match header', () => { diff --git a/src/tests/mempool-tests.ts b/src/tests/mempool-tests.ts index 7af50bfa..7f0fa218 100644 --- a/src/tests/mempool-tests.ts +++ b/src/tests/mempool-tests.ts @@ -34,7 +34,7 @@ describe('mempool tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); afterEach(async () => { diff --git a/src/tests/microblock-tests.ts b/src/tests/microblock-tests.ts index 9f7aae41..de91579d 100644 --- a/src/tests/microblock-tests.ts +++ b/src/tests/microblock-tests.ts @@ -20,7 +20,7 @@ import { } from '../datastore/common'; import { startApiServer } from '../api/init'; import { PgSqlClient } from '../datastore/connection'; -import { bufferToHexPrefixString, httpPostRequest, I32_MAX, logger } from '../helpers'; +import { bufferToHexPrefixString, httpPostRequest, I32_MAX } from '../helpers'; import { AddressStxBalanceResponse, AddressStxInboundListResponse, @@ -41,6 +41,7 @@ import { createClarityValueArray } from '../stacks-encoding-helpers'; import { PgWriteStore } from '../datastore/pg-write-store'; import { cycleMigrations, runMigrations } from '../datastore/migrations'; import { getRawEventRequests } from '../datastore/event-requests'; +import { logger } from '../logger'; describe('microblock tests', () => { let db: PgWriteStore; @@ -78,7 +79,6 @@ describe('microblock tests', () => { chainId: ChainID.Mainnet, serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); return [eventServer, eventServer.closeAsync] as const; }, @@ -86,7 +86,6 @@ describe('microblock tests', () => { const apiServer = await startApiServer({ datastore: db, chainId: ChainID.Mainnet, - httpLogLevel: 'debug', }); return [apiServer, apiServer.terminate] as const; }, @@ -140,7 +139,6 @@ describe('microblock tests', () => { chainId: ChainID.Mainnet, serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); return [eventServer, eventServer.closeAsync] as const; }, @@ -148,7 +146,6 @@ describe('microblock tests', () => { const apiServer = await startApiServer({ datastore: db, chainId: ChainID.Mainnet, - httpLogLevel: 'debug', }); return [apiServer, apiServer.terminate] as const; }, @@ -206,7 +203,6 @@ describe('microblock tests', () => { chainId: ChainID.Mainnet, serverHost: '127.0.0.1', serverPort: 0, - httpLogLevel: 'debug', }); return [eventServer, eventServer.closeAsync] as const; }, @@ -214,7 +210,6 @@ describe('microblock tests', () => { const apiServer = await startApiServer({ datastore: db, chainId: ChainID.Mainnet, - httpLogLevel: 'debug', }); return [apiServer, apiServer.terminate] as const; }, @@ -257,7 +252,6 @@ describe('microblock tests', () => { const apiServer = await startApiServer({ datastore: db, chainId: ChainID.Testnet, - httpLogLevel: 'silly', }); return [apiServer, apiServer.terminate] as const; }, diff --git a/src/tests/other-tests.ts b/src/tests/other-tests.ts index 8c16f34d..7b8f1c43 100644 --- 
a/src/tests/other-tests.ts +++ b/src/tests/other-tests.ts @@ -32,7 +32,7 @@ describe('other tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('stx-supply', async () => { diff --git a/src/tests/search-tests.ts b/src/tests/search-tests.ts index 4a1bd547..c0f1a2e2 100644 --- a/src/tests/search-tests.ts +++ b/src/tests/search-tests.ts @@ -33,7 +33,7 @@ describe('search tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('search term - hash', async () => { diff --git a/src/tests/smart-contract-tests.ts b/src/tests/smart-contract-tests.ts index 957ac555..825d0d77 100644 --- a/src/tests/smart-contract-tests.ts +++ b/src/tests/smart-contract-tests.ts @@ -28,7 +28,7 @@ describe('smart contract tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('list contract log events', async () => { diff --git a/src/tests/socket-io-tests.ts b/src/tests/socket-io-tests.ts index a09bc3c1..2c381a6b 100644 --- a/src/tests/socket-io-tests.ts +++ b/src/tests/socket-io-tests.ts @@ -31,7 +31,6 @@ describe('socket-io', () => { apiServer = await startApiServer({ datastore: db, chainId: ChainID.Testnet, - httpLogLevel: 'silly', }); }); diff --git a/src/tests/token-tests.ts b/src/tests/token-tests.ts index 8b0ef7ac..d93ee302 100644 --- a/src/tests/token-tests.ts +++ b/src/tests/token-tests.ts @@ -19,7 +19,7 @@ describe('/extended/v1/tokens tests', () => { withNotifier: false, skipMigrations: true, }); - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('/nft/holdings', async () => { diff --git a/src/tests/tx-tests.ts b/src/tests/tx-tests.ts index 43827293..09f18828 100644 --- a/src/tests/tx-tests.ts +++ b/src/tests/tx-tests.ts @@ -59,7 +59,7 @@ describe('tx tests', () => { skipMigrations: true, }); client = db.sql; - api = await startApiServer({ datastore: db, chainId: ChainID.Testnet, httpLogLevel: 'silly' }); + api = await startApiServer({ datastore: db, chainId: ChainID.Testnet }); }); test('fetch tx list details', async () => { diff --git a/src/tests/v2-proxy-tests.ts b/src/tests/v2-proxy-tests.ts index 1ffae985..5e1624de 100644 --- a/src/tests/v2-proxy-tests.ts +++ b/src/tests/v2-proxy-tests.ts @@ -49,7 +49,6 @@ describe('v2-proxy tests', () => { const apiServer = await startApiServer({ datastore: db, chainId: ChainID.Mainnet, - httpLogLevel: 'debug', }); return [apiServer, apiServer.terminate] as const; }, diff --git a/src/tests/websocket-tests.ts b/src/tests/websocket-tests.ts index b4340a92..b1d6f31e 100644 --- a/src/tests/websocket-tests.ts +++ b/src/tests/websocket-tests.ts @@ -44,7 +44,6 @@ describe('websocket notifications', () => { apiServer = await startApiServer({ datastore: db, chainId: ChainID.Testnet, - httpLogLevel: 'silly', }); }); diff --git a/src/token-metadata/tokens-contract-handler.ts b/src/token-metadata/tokens-contract-handler.ts index 23d0f0c3..895048d9 100644 --- a/src/token-metadata/tokens-contract-handler.ts 
+++ b/src/token-metadata/tokens-contract-handler.ts @@ -12,7 +12,7 @@ import { uintCV, UIntCV, } from '@stacks/transactions'; -import { logger, parseDataUrl, REPO_DIR, stopwatch } from '../helpers'; +import { parseDataUrl, REPO_DIR, stopwatch } from '../helpers'; import * as querystring from 'querystring'; import { getTokenMetadataFetchTimeoutMs, @@ -25,6 +25,7 @@ import { import { ReadOnlyContractCallResponse, StacksCoreRpcClient } from '../core-rpc/client'; import { FetchError } from 'node-fetch'; import { PgWriteStore } from '../datastore/pg-write-store'; +import { logger } from '../logger'; /** * The maximum number of bytes of metadata to fetch. diff --git a/src/token-metadata/tokens-processor-queue.ts b/src/token-metadata/tokens-processor-queue.ts index 8ce25591..cf5d7c2f 100644 --- a/src/token-metadata/tokens-processor-queue.ts +++ b/src/token-metadata/tokens-processor-queue.ts @@ -1,10 +1,11 @@ -import { FoundOrNot, logError, logger } from '../helpers'; +import { FoundOrNot } from '../helpers'; import { Evt } from 'evt'; import PQueue from 'p-queue'; import { DbTokenMetadataQueueEntry, TokenMetadataUpdateInfo } from '../datastore/common'; import { ChainID, ClarityAbi } from '@stacks/transactions'; import { TokensContractHandler } from './tokens-contract-handler'; import { PgWriteStore } from '../datastore/pg-write-store'; +import { logger } from '../logger'; /** * The maximum number of token metadata parsing operations that can be ran concurrently before @@ -155,9 +156,9 @@ export class TokensProcessorQueue { await tokenContractHandler.start(); }) .catch(error => { - logError( - `[token-metadata] error processing token contract: ${tokenContractHandler.contractAddress} ${tokenContractHandler.contractName} from tx ${tokenContractHandler.txId}`, - error + logger.error( + error, + `[token-metadata] error processing token contract: ${tokenContractHandler.contractAddress} ${tokenContractHandler.contractName} from tx ${tokenContractHandler.txId}` ); }) .finally(() => {