feat(runes): add runes api to codebase (#449)

* new code

* add ci

* files

* standard

* Update api/runes/package.json

Co-authored-by: ASuciuX <151519329+ASuciuX@users.noreply.github.com>

---------

Co-authored-by: ASuciuX <151519329+ASuciuX@users.noreply.github.com>
This commit is contained in:
Rafael Cárdenas
2025-02-27 09:06:43 -06:00
committed by GitHub
parent 68158786f0
commit da5596afec
43 changed files with 23657 additions and 1676 deletions

View File

@@ -20,7 +20,7 @@ jobs:
strategy:
fail-fast: false
matrix:
suite: [ordinals]
suite: [ordinals, runes]
runs-on: ubuntu-latest
defaults:
run:
@@ -41,8 +41,9 @@ jobs:
path: |
~/.npm
**/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
key: ${{ runner.os }}-build-${{ matrix.suite }}-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ matrix.suite }}-${{ env.cache-name }}-
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
@@ -63,7 +64,7 @@ jobs:
strategy:
fail-fast: false
matrix:
suite: [ordinals]
suite: [ordinals, runes]
defaults:
run:
working-directory: ./api/${{ matrix.suite }}
@@ -86,8 +87,9 @@ jobs:
path: |
~/.npm
**/node_modules
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
key: ${{ runner.os }}-build-${{ matrix.suite }}-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-build-${{ matrix.suite }}-${{ env.cache-name }}-
${{ runner.os }}-build-${{ env.cache-name }}-
${{ runner.os }}-build-
${{ runner.os }}-
@@ -260,7 +262,7 @@ jobs:
strategy:
fail-fast: false
matrix:
suite: [ordinals]
suite: [ordinals, runes]
runs-on: ubuntu-latest
needs: semantic-release
steps:
@@ -285,7 +287,7 @@ jobs:
uses: docker/metadata-action@v5
with:
images: |
hirosystems/${{ matrix.suite }}-api
hirosystems/bitcoin-indexer-${{ matrix.suite }}-api
tags: |
type=ref,event=branch
type=ref,event=pr

21
.vscode/launch.json vendored
View File

@@ -126,5 +126,26 @@
"PGPASSWORD": "postgres",
},
},
{
"type": "node",
"request": "launch",
"name": "test: runes-api",
"program": "${workspaceFolder}/api/runes/node_modules/jest/bin/jest",
"cwd": "${workspaceFolder}/api/runes/",
"args": [
"--testTimeout=3600000",
"--runInBand",
"--no-cache",
],
"outputCapture": "std",
"console": "integratedTerminal",
"preLaunchTask": "npm: testenv:run",
"postDebugTask": "npm: testenv:stop",
"env": {
"PGHOST": "localhost",
"PGUSER": "postgres",
"PGPASSWORD": "postgres",
},
},
]
}

View File

@@ -1,3 +0,0 @@
{
"extends": ["@commitlint/config-conventional"]
}

View File

@@ -1 +0,0 @@
# See src/env.ts for environment variable documentation.

View File

@@ -1,3 +0,0 @@
{
"extends": "semantic-release-monorepo"
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2023 Hiro Systems PBC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -1,5 +0,0 @@
{
"git": {
"deploymentEnabled": false
}
}

2
api/runes/.eslintignore Normal file
View File

@@ -0,0 +1,2 @@
node_modules/
.eslintrc.js

29
api/runes/.eslintrc.js Normal file
View File

@@ -0,0 +1,29 @@
// ESLint configuration for the Runes API package.
// Extends the shared Stacks config plus `prettier` (disables formatting rules
// that would conflict with Prettier) and runs type-aware linting against
// `tsconfig.json`.
module.exports = {
root: true,
extends: ['@stacks/eslint-config', 'prettier'],
overrides: [],
parser: '@typescript-eslint/parser',
parserOptions: {
tsconfigRootDir: __dirname,
project: './tsconfig.json',
ecmaVersion: 2020,
sourceType: 'module',
},
// Files outside the TS project (configs, generated client, plain-JS tests)
// are excluded so type-aware rules don't fail on them.
ignorePatterns: ['*.config.js', 'config/*', '*.mjs', 'tests/*.js', 'client/*'],
plugins: ['@typescript-eslint', 'eslint-plugin-tsdoc', 'prettier'],
rules: {
// Formatting deviations are hard errors so CI catches them.
'prettier/prettier': 'error',
'@typescript-eslint/no-inferrable-types': 'off',
'@typescript-eslint/camelcase': 'off',
'@typescript-eslint/no-empty-function': 'off',
// 'nofunc' allows calling hoisted function declarations before definition.
'@typescript-eslint/no-use-before-define': ['error', 'nofunc'],
// Unawaited promises are errors unless explicitly discarded with `void`.
'@typescript-eslint/no-floating-promises': ['error', { ignoreVoid: true }],
'no-warning-comments': 'warn',
'tsdoc/syntax': 'error',
// TODO: Remove this when `any` abi type is fixed.
'@typescript-eslint/no-unsafe-assignment': 'off',
'@typescript-eslint/no-unsafe-member-access': 'off',
'@typescript-eslint/no-unsafe-call': 'off',
'@typescript-eslint/restrict-template-expressions': 'off',
},
};

1
api/runes/.nvmrc Normal file
View File

@@ -0,0 +1 @@
20

1
api/runes/README.md Normal file
View File

@@ -0,0 +1 @@
# Runes API

200
api/runes/jest.config.js Normal file
View File

@@ -0,0 +1,200 @@
/*
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
// Jest configuration for the Runes API test suite.
// Based on the standard `jest --init` template; only a handful of options are
// active — the rest are kept (commented) for reference, matching the template.
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/private/var/folders/v3/swygw5ld38x59y9wtc2qv3fc0000gn/T/jest_dx",
// Automatically clear mock calls, instances, contexts and results before every test
// clearMocks: false,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected
collectCoverageFrom: [
"src/**/*.ts",
],
// The directory where Jest should output its coverage files
// coverageDirectory: undefined,
// An array of regexp pattern strings used to skip coverage collection
coveragePathIgnorePatterns: [
"/node_modules/",
"/src/@types/"
],
// Indicates which provider should be used to instrument code for coverage
coverageProvider: "v8",
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// The default configuration for fake timers
// fakeTimers: {
// "enableGlobally": false
// },
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
globalSetup: './tests/setup.ts',
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {},
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "mjs",
// "cjs",
// "jsx",
// "ts",
// "tsx",
// "json",
// "node"
// ],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
// moduleNameMapper: {},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
preset: 'ts-jest',
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state before every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state and implementation before every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
rootDir: '',
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
// testEnvironment: "jest-environment-node",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
// testMatch: [
// "**/__tests__/**/*.[jt]s?(x)",
// "**/?(*.)+(spec|test).[tj]s?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
testPathIgnorePatterns: [
"/node_modules/",
"/client/",
"/dist/"
],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",
// A map from regular expressions to paths to transformers
// NOTE(review): project-level `transform` overrides the transform supplied by
// the `ts-jest` preset above; an empty object here would appear to disable TS
// transformation entirely — confirm tests actually run with this combination.
transform: {},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/",
// "\\.pnp\\.[^\\/]+$"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};

21230
api/runes/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

66
api/runes/package.json Normal file
View File

@@ -0,0 +1,66 @@
{
"name": "runes-api",
"version": "0.3.0",
"description": "",
"main": "index.js",
"scripts": {
"build": "rimraf ./dist && tsc --project tsconfig.build.json",
"start": "node dist/src/index.js",
"start-ts": "ts-node ./src/index.ts",
"test": "jest --runInBand",
"generate:openapi": "rimraf ./tmp && node -r ts-node/register ./util/openapi-generator.ts",
"generate:docs": "redoc-cli build --output ./tmp/index.html ./tmp/openapi.yaml",
"generate:git-info": "rimraf .git-info && node_modules/.bin/api-toolkit-git-info",
"generate:vercel": "npm run generate:git-info && npm run generate:openapi && npm run generate:docs",
"lint:eslint": "eslint . --ext .ts,.tsx -f unix",
"lint:prettier": "prettier --check src/**/*.ts tests/**/*.ts",
"lint:unused-exports": "ts-unused-exports tsconfig.json --showLineNumber --excludePathsFromReport=util/*",
"testenv:run": "docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml up",
"testenv:stop": "docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml down -v -t 0",
"testenv:logs": "docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml logs -t -f"
},
"author": "Hiro Systems PBC <engineering@hiro.so> (https://hiro.so)",
"license": "Apache 2.0",
"prettier": "@stacks/prettier-config",
"devDependencies": {
"@commitlint/cli": "^17.4.3",
"@commitlint/config-conventional": "^17.4.3",
"@semantic-release/changelog": "^6.0.3",
"@semantic-release/commit-analyzer": "^10.0.4",
"@semantic-release/git": "^10.0.1",
"@stacks/eslint-config": "^1.2.0",
"@types/jest": "^29.2.4",
"@types/supertest": "^2.0.12",
"@typescript-eslint/eslint-plugin": "^5.46.1",
"@typescript-eslint/parser": "^5.51.0",
"babel-jest": "^29.3.1",
"conventional-changelog-conventionalcommits": "^6.1.0",
"eslint": "^8.29.0",
"eslint-plugin-prettier": "^4.2.1",
"eslint-plugin-tsdoc": "^0.2.17",
"husky": "^8.0.3",
"jest": "^29.3.1",
"prettier": "^2.8.1",
"redoc-cli": "^0.13.20",
"rimraf": "^3.0.2",
"ts-jest": "^29.0.3",
"ts-node": "^10.8.2",
"ts-unused-exports": "^10.0.1",
"typescript": "^4.7.4"
},
"dependencies": {
"@fastify/cors": "^8.0.0",
"@fastify/formbody": "^7.0.1",
"@fastify/multipart": "^7.1.0",
"@fastify/swagger": "^8.3.1",
"@fastify/type-provider-typebox": "3.2.0",
"@hirosystems/api-toolkit": "^1.6.0",
"@types/node": "^18.13.0",
"bignumber.js": "^9.1.2",
"env-schema": "^5.2.1",
"fastify": "4.15.0",
"fastify-metrics": "10.2.0",
"pino": "^8.10.0",
"postgres": "^3.3.4"
}
}

14
api/runes/src/@types/fastify/index.d.ts vendored Normal file
View File

@@ -0,0 +1,14 @@
import fastify from 'fastify';
import { PgStore } from '../../pg/pg-store';

// Module augmentation: adds typing for the `db` property that the server
// attaches at build time via `fastify.decorate('db', ...)` (see api/init.ts),
// so route handlers can use `fastify.db` without casts.
// NOTE(review): the generic-parameter defaults below reference `Server`,
// `IncomingMessage`, `ServerResponse`, `FastifyLoggerInstance` and
// `FastifyTypeProviderDefault` without importing them in this file — confirm
// they resolve (e.g. via ambient declarations) under `skipLibCheck`/tsconfig.
declare module 'fastify' {
export interface FastifyInstance<
HttpServer = Server,
HttpRequest = IncomingMessage,
HttpResponse = ServerResponse,
Logger = FastifyLoggerInstance,
TypeProvider = FastifyTypeProviderDefault
> {
// Postgres-backed data store for runes data, shared by all routes.
db: PgStore;
}
}

59
api/runes/src/api/init.ts Normal file
View File

@@ -0,0 +1,59 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import FastifyCors from '@fastify/cors';
import Fastify, { FastifyInstance, FastifyPluginAsync } from 'fastify';
import FastifyMetrics, { IFastifyMetrics } from 'fastify-metrics';
import { Server } from 'http';
import { PgStore } from '../pg/pg-store';
import { EtchingRoutes } from './routes/etchings';
import { AddressRoutes } from './routes/addresses';
import { TransactionRoutes } from './routes/transactions';
import { BlockRoutes } from './routes/blocks';
import { StatusRoutes } from './routes/status';
import { PINO_LOGGER_CONFIG, isProdEnv } from '@hirosystems/api-toolkit';

/**
 * Root API plugin. Mounts every route group (status, etchings, addresses,
 * transactions, blocks) onto the instance it is registered on.
 */
export const Api: FastifyPluginAsync<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = async instance => {
  await instance.register(StatusRoutes);
  await instance.register(EtchingRoutes);
  await instance.register(AddressRoutes);
  await instance.register(TransactionRoutes);
  await instance.register(BlockRoutes);
};

/**
 * Builds the public REST API server.
 * @param args - Contains the `PgStore` the routes will read from.
 * @returns A configured (not yet listening) Fastify instance.
 */
export async function buildApiServer(args: { db: PgStore }) {
  const server = Fastify({
    trustProxy: true,
    logger: PINO_LOGGER_CONFIG,
  }).withTypeProvider<TypeBoxTypeProvider>();

  // Metrics collection is enabled only in production; the scrape endpoint is
  // disabled here because it is served by the separate Prometheus server.
  if (isProdEnv) {
    await server.register(FastifyMetrics, { endpoint: null });
  }
  await server.register(FastifyCors);
  // Expose the Postgres store to all route handlers as `fastify.db`.
  server.decorate('db', args.db);
  // The API is reachable both with and without the version prefix.
  await server.register(Api, { prefix: '/runes/v1' });
  await server.register(Api, { prefix: '/runes' });

  return server;
}

/**
 * Builds a standalone server that exposes Prometheus metrics on `/metrics`.
 * @param args - Contains the metrics registry collected by the API server.
 * @returns A configured (not yet listening) Fastify instance.
 */
export async function buildPrometheusServer(args: {
  metrics: IFastifyMetrics;
}): Promise<FastifyInstance> {
  const server = Fastify({
    trustProxy: true,
    logger: PINO_LOGGER_CONFIG,
  });

  server.route({
    url: '/metrics',
    method: 'GET',
    logLevel: 'info',
    handler: async (_, reply) => {
      const report = await args.metrics.client.register.metrics();
      await reply.type('text/plain').send(report);
    },
  });

  return server;
}

View File

@@ -0,0 +1,90 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import {
  AddressSchema,
  LimitSchema,
  OffsetSchema,
  BalanceResponseSchema,
  ActivityResponseSchema,
} from '../schemas';
import { parseActivityResponse, parseBalanceResponse } from '../util/helpers';
import { Optional, PaginatedResponse } from '@hirosystems/api-toolkit';
import { handleCache } from '../util/cache';

/**
 * Routes that serve per-address rune data: balances and activity history.
 * Both endpoints are paginated via the shared `offset`/`limit` query params.
 */
export const AddressRoutes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  // Shared cache-control / ETag handling for every route in this group.
  fastify.addHook('preHandler', handleCache);

  fastify.get(
    '/addresses/:address/balances',
    {
      schema: {
        operationId: 'getAddressBalances',
        summary: 'Address balances',
        description: 'Retrieves a paginated list of address balances',
        tags: ['Balances'],
        params: Type.Object({
          address: AddressSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(BalanceResponseSchema, 'Paginated balances response'),
        },
      },
    },
    async (request, reply) => {
      const pageOffset = request.query.offset ?? 0;
      const pageLimit = request.query.limit ?? 20;
      const balances = await fastify.db.getAddressBalances(
        request.params.address,
        pageOffset,
        pageLimit
      );
      await reply.send({
        limit: pageLimit,
        offset: pageOffset,
        total: balances.total,
        results: balances.results.map(r => parseBalanceResponse(r)),
      });
    }
  );

  fastify.get(
    '/addresses/:address/activity',
    {
      schema: {
        operationId: 'getAddressActivity',
        summary: 'Address activity',
        description: 'Retrieves a paginated list of rune activity for an address',
        tags: ['Activity'],
        params: Type.Object({
          address: AddressSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(ActivityResponseSchema, 'Paginated activity response'),
        },
      },
    },
    async (request, reply) => {
      const pageOffset = request.query.offset ?? 0;
      const pageLimit = request.query.limit ?? 20;
      const activity = await fastify.db.getAddressActivity(
        request.params.address,
        pageOffset,
        pageLimit
      );
      await reply.send({
        limit: pageLimit,
        offset: pageOffset,
        total: activity.total,
        results: activity.results.map(r => parseActivityResponse(r)),
      });
    }
  );

  done();
};

View File

@@ -0,0 +1,51 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import { LimitSchema, OffsetSchema, ActivityResponseSchema, BlockSchema } from '../schemas';
import { parseActivityResponse } from '../util/helpers';
import { Optional, PaginatedResponse } from '@hirosystems/api-toolkit';
import { handleCache } from '../util/cache';

/**
 * Routes that serve per-block rune activity, paginated via the shared
 * `offset`/`limit` query params.
 */
export const BlockRoutes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  // Shared cache-control / ETag handling for every route in this group.
  fastify.addHook('preHandler', handleCache);

  fastify.get(
    '/blocks/:block/activity',
    {
      schema: {
        operationId: 'getBlockActivity',
        summary: 'Block activity',
        description: 'Retrieves a paginated list of rune activity for a block',
        tags: ['Activity'],
        params: Type.Object({
          block: BlockSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(ActivityResponseSchema, 'Paginated activity response'),
        },
      },
    },
    async (request, reply) => {
      const pageOffset = request.query.offset ?? 0;
      const pageLimit = request.query.limit ?? 20;
      const activity = await fastify.db.getBlockActivity(request.params.block, pageOffset, pageLimit);
      await reply.send({
        limit: pageLimit,
        offset: pageOffset,
        total: activity.total,
        results: activity.results.map(r => parseActivityResponse(r)),
      });
    }
  );

  done();
};

View File

@@ -0,0 +1,221 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { Value } from '@sinclair/typebox/value';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import {
AddressSchema,
RuneSchema,
EtchingResponseSchema,
LimitSchema,
NotFoundResponse,
OffsetSchema,
SimpleBalanceResponseSchema,
SimpleActivityResponseSchema,
} from '../schemas';
import { parseBalanceResponse, parseActivityResponse, parseEtchingResponse } from '../util/helpers';
import { Optional, PaginatedResponse } from '@hirosystems/api-toolkit';
import { handleCache } from '../util/cache';
// Fastify plugin with all Rune-etching related endpoints: paginated etching
// listings, single-etching lookup, per-rune activity feeds, and holder
// balances. All routes share the ETag/304 handling installed by `handleCache`.
export const EtchingRoutes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  // Short-circuit with 304 when the client already holds the chain-tip ETag.
  fastify.addHook('preHandler', handleCache);
  // GET /etchings — paginated list of all rune etchings.
  fastify.get(
    '/etchings',
    {
      schema: {
        operationId: 'getEtchings',
        summary: 'Rune etchings',
        description: 'Retrieves a paginated list of rune etchings',
        tags: ['Etchings'],
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(EtchingResponseSchema, 'Paginated etchings response'),
        },
      },
    },
    async (request, reply) => {
      // Pagination defaults: first page of 20 results.
      const offset = request.query.offset ?? 0;
      const limit = request.query.limit ?? 20;
      const results = await fastify.db.getRuneEtchings(offset, limit);
      await reply.send({
        limit,
        offset,
        total: results.total,
        results: results.results.map(r => parseEtchingResponse(r)),
      });
    }
  );
  // GET /etchings/:etching — single etching; `:etching` accepts a rune ID,
  // number, name, or spaced name (see RuneSchema union).
  fastify.get(
    '/etchings/:etching',
    {
      schema: {
        operationId: 'getEtching',
        summary: 'Rune etching',
        description: 'Retrieves information for a Rune etching',
        tags: ['Etchings'],
        params: Type.Object({
          etching: RuneSchema,
        }),
        response: {
          200: EtchingResponseSchema,
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const rune = await fastify.db.getRuneEtching(request.params.etching);
      if (!rune) {
        await reply.code(404).send(Value.Create(NotFoundResponse));
      } else {
        await reply.send(parseEtchingResponse(rune));
      }
    }
  );
  // GET /etchings/:etching/activity — paginated ledger activity for one rune.
  fastify.get(
    '/etchings/:etching/activity',
    {
      schema: {
        operationId: 'getRuneActivity',
        summary: 'Rune activity',
        description: 'Retrieves all activity for a Rune',
        tags: ['Activity'],
        params: Type.Object({
          etching: RuneSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(SimpleActivityResponseSchema, 'Paginated activity response'),
        },
      },
    },
    async (request, reply) => {
      const offset = request.query.offset ?? 0;
      const limit = request.query.limit ?? 20;
      const results = await fastify.db.getRuneActivity(request.params.etching, offset, limit);
      await reply.send({
        limit,
        offset,
        total: results.total,
        results: results.results.map(r => parseActivityResponse(r)),
      });
    }
  );
  // GET /etchings/:etching/activity/:address — rune activity filtered to one
  // Bitcoin address.
  fastify.get(
    '/etchings/:etching/activity/:address',
    {
      schema: {
        operationId: 'getRuneAddressActivity',
        summary: 'Rune activity for address',
        description: 'Retrieves all activity for a Rune address',
        tags: ['Activity'],
        params: Type.Object({
          etching: RuneSchema,
          address: AddressSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(SimpleActivityResponseSchema, 'Paginated activity response'),
        },
      },
    },
    async (request, reply) => {
      const offset = request.query.offset ?? 0;
      const limit = request.query.limit ?? 20;
      const results = await fastify.db.getRuneAddressActivity(
        request.params.etching,
        request.params.address,
        offset,
        limit
      );
      await reply.send({
        limit,
        offset,
        total: results.total,
        results: results.results.map(r => parseActivityResponse(r)),
      });
    }
  );
  // GET /etchings/:etching/holders — addresses holding the rune, by balance.
  fastify.get(
    '/etchings/:etching/holders',
    {
      schema: {
        operationId: 'getRuneHolders',
        summary: 'Rune holders',
        description: 'Retrieves a paginated list of holders for a Rune',
        tags: ['Balances'],
        params: Type.Object({
          etching: RuneSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(SimpleBalanceResponseSchema, 'Paginated holders response'),
        },
      },
    },
    async (request, reply) => {
      const offset = request.query.offset ?? 0;
      const limit = request.query.limit ?? 20;
      const results = await fastify.db.getRuneHolders(request.params.etching, offset, limit);
      await reply.send({
        limit,
        offset,
        total: results.total,
        results: results.results.map(r => parseBalanceResponse(r)),
      });
    }
  );
  // GET /etchings/:etching/holders/:address — single address balance for one
  // rune; 404 when the address has no balance rows.
  fastify.get(
    '/etchings/:etching/holders/:address',
    {
      schema: {
        operationId: 'getRuneHolderBalance',
        summary: 'Rune holder balance',
        description: 'Retrieves holder balance for a specific Rune',
        tags: ['Balances'],
        params: Type.Object({
          etching: RuneSchema,
          address: AddressSchema,
        }),
        response: {
          404: NotFoundResponse,
          200: SimpleBalanceResponseSchema,
        },
      },
    },
    async (request, reply) => {
      const balance = await fastify.db.getRuneAddressBalance(
        request.params.etching,
        request.params.address
      );
      if (!balance) {
        await reply.code(404).send(Value.Create(NotFoundResponse));
      } else {
        await reply.send(parseBalanceResponse(balance));
      }
    }
  );
  done();
};

View File

@@ -0,0 +1,42 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import { ApiStatusResponse } from '../schemas';
import { SERVER_VERSION } from '@hirosystems/api-toolkit';
import { handleCache } from '../util/cache';
// Fastify plugin exposing the API status endpoint (server version and the
// most recently ingested block height).
export const StatusRoutes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  // Reuse the shared ETag-based cache handling.
  fastify.addHook('preHandler', handleCache);
  fastify.get(
    '/',
    {
      schema: {
        operationId: 'getApiStatus',
        summary: 'API Status',
        description: 'Displays the status of the API',
        tags: ['Status'],
        response: {
          200: ApiStatusResponse,
        },
      },
    },
    async (request, reply) => {
      // Read the chain tip inside a transaction so the reported height is a
      // consistent snapshot.
      const status = await fastify.db.sqlTransaction(async sql => {
        const blockHeight = await fastify.db.getChainTipBlockHeight();
        const version = `runes-api ${SERVER_VERSION.tag} (${SERVER_VERSION.branch}:${SERVER_VERSION.commit})`;
        return {
          server_version: version,
          status: 'ready',
          block_height: blockHeight ? parseInt(blockHeight) : undefined,
        };
      });
      await reply.send(status);
    }
  );
  done();
};

View File

@@ -0,0 +1,51 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import { LimitSchema, OffsetSchema, ActivityResponseSchema, TransactionIdSchema } from '../schemas';
import { parseActivityResponse } from '../util/helpers';
import { Optional, PaginatedResponse } from '@hirosystems/api-toolkit';
import { handleCache } from '../util/cache';
// Fastify plugin exposing transaction-scoped rune activity endpoints.
export const TransactionRoutes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  // Short-circuit with 304 when the client already holds the chain-tip ETag.
  fastify.addHook('preHandler', handleCache);
  // GET /transactions/:tx_id/activity — all rune ledger entries produced by a
  // single Bitcoin transaction, paginated.
  fastify.get(
    '/transactions/:tx_id/activity',
    {
      schema: {
        operationId: 'getTransactionActivity',
        summary: 'Transaction activity',
        description: 'Retrieves a paginated list of rune activity for a transaction',
        tags: ['Activity'],
        params: Type.Object({
          tx_id: TransactionIdSchema,
        }),
        querystring: Type.Object({
          offset: Optional(OffsetSchema),
          limit: Optional(LimitSchema),
        }),
        response: {
          200: PaginatedResponse(ActivityResponseSchema, 'Paginated activity response'),
        },
      },
    },
    async (request, reply) => {
      // Pagination defaults: first page of 20 results.
      const offset = request.query.offset ?? 0;
      const limit = request.query.limit ?? 20;
      const results = await fastify.db.getTransactionActivity(request.params.tx_id, offset, limit);
      await reply.send({
        limit,
        offset,
        total: results.total,
        results: results.results.map(r => parseActivityResponse(r)),
      });
    }
  );
  done();
};

View File

@@ -0,0 +1,386 @@
import { SwaggerOptions } from '@fastify/swagger';
import { Nullable, Optional, SERVER_VERSION } from '@hirosystems/api-toolkit';
import { Static, Type } from '@sinclair/typebox';
import { TypeCompiler } from '@sinclair/typebox/compiler';
// Swagger/OpenAPI document configuration served by the API.
export const OpenApiSchemaOptions: SwaggerOptions = {
  openapi: {
    info: {
      title: 'Runes API',
      description: `REST API to get information about Runes`,
      version: SERVER_VERSION.tag,
    },
    externalDocs: {
      url: 'https://github.com/hirosystems/runehook',
      description: 'Source Repository',
    },
    servers: [
      {
        url: 'https://api.hiro.so/',
        description: 'mainnet',
      },
    ],
    tags: [
      {
        name: 'Etchings',
        description: 'Rune etchings',
      },
      {
        name: 'Activity',
        description: 'Rune activity',
      },
      {
        name: 'Balances',
        description: 'Rune balances',
      },
      {
        name: 'Status',
        description: 'API status',
      },
    ],
  },
};
// Pagination query parameter: zero-based result offset.
export const OffsetSchema = Type.Integer({
  minimum: 0,
  title: 'Offset',
  description: 'Result offset',
});
export type Offset = Static<typeof OffsetSchema>;
// Pagination query parameter: page size, capped at 60.
export const LimitSchema = Type.Integer({
  minimum: 1,
  maximum: 60,
  title: 'Limit',
  description: 'Results per page',
});
export type Limit = Static<typeof LimitSchema>;
// Rune identifier formats accepted by `:etching` route params. The compiled
// `*CType` checkers are reused by the DB layer to decide which column to
// filter on (see pg-store's runeFilter).
const RuneIdSchema = Type.RegEx(/^[0-9]+:[0-9]+$/, { title: 'Rune ID' });
const RuneNumberSchema = Type.RegEx(/^[0-9]+$/, { title: 'Rune number' });
export const RuneNumberSchemaCType = TypeCompiler.Compile(RuneNumberSchema);
const RuneNameSchema = Type.RegEx(/^[A-Z]+$/, { title: 'Rune name' });
export const RuneNameSchemaCType = TypeCompiler.Compile(RuneNameSchema);
// Spaced names use the bullet character (•) as the spacer.
const RuneSpacedNameSchema = Type.RegEx(/^[A-Za-z]+(•[A-Za-z]+)+$/, {
  title: 'Rune name with spacers',
});
export const RuneSpacedNameSchemaCType = TypeCompiler.Compile(RuneSpacedNameSchema);
// Union of every accepted rune identifier format.
export const RuneSchema = Type.Union([
  RuneIdSchema,
  RuneNumberSchema,
  RuneNameSchema,
  RuneSpacedNameSchema,
]);
export type Rune = Static<typeof RuneSchema>;
export const AddressSchema = Type.String({
  title: 'Address',
  description: 'Bitcoin address',
  examples: ['bc1p8aq8s3z9xl87e74twfk93mljxq6alv4a79yheadx33t9np4g2wkqqt8kc5'],
});
export type Address = Static<typeof AddressSchema>;
// 64 hex chars, case-insensitive.
export const TransactionIdSchema = Type.RegEx(/^[a-fA-F0-9]{64}$/, {
  title: 'Transaction ID',
  description: 'A transaction ID',
  examples: ['8f46f0d4ef685e650727e6faf7e30f23b851a7709714ec774f7909b3fb5e604c'],
});
export type TransactionId = Static<typeof TransactionIdSchema>;
// const TransactionOutputSchema = Type.RegEx(/^[a-fA-F0-9]{64}:[0-9]+$/, {
//   title: 'Transaction Output',
//   description: 'A transaction output',
//   examples: ['8f46f0d4ef685e650727e6faf7e30f23b851a7709714ec774f7909b3fb5e604c:0'],
// });
// type TransactionOutput = Static<typeof TransactionOutputSchema>;
// Bitcoin block height, expressed as a decimal string (route params arrive as
// strings). The example is a string to match the schema's declared type,
// consistent with TransactionIdSchema and BlockHashSchema above.
const BlockHeightSchema = Type.RegEx(/^[0-9]+$/, {
  title: 'Block Height',
  description: 'Bitcoin block height',
  examples: ['777678'],
});
// Compiled checker used by the DB layer to distinguish heights from hashes.
export const BlockHeightCType = TypeCompiler.Compile(BlockHeightSchema);
// Bitcoin block hash: 8 leading zeroes followed by 56 hex characters.
const BlockHashSchema = Type.RegEx(/^[0]{8}[a-fA-F0-9]{56}$/, {
  title: 'Block Hash',
  description: 'Bitcoin block hash',
  examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
});
type BlockHash = Static<typeof BlockHashSchema>;
// A block referenced either by height or by hash.
export const BlockSchema = Type.Union([BlockHeightSchema, BlockHashSchema]);
export type Block = Static<typeof BlockSchema>;
// ==========================
// Responses
// ==========================
// Shape of the GET / status endpoint response.
export const ApiStatusResponse = Type.Object(
  {
    server_version: Type.String({ examples: [''] }),
    status: Type.String(),
    // Absent until the indexer has written at least one ledger row.
    block_height: Optional(Type.Integer()),
  },
  { title: 'Api Status Response' }
);
// Where on the Bitcoin chain an operation was confirmed. Embedded in both
// etching and activity responses.
const LocationDetailResponseSchema = Type.Object(
  {
    block_hash: Type.String({
      examples: ['00000000000000000000c9787573a1f1775a2b56b403a2d0c7957e9a5bc754bb'],
      title: 'Block hash',
      description: 'Bitcoin block hash',
    }),
    block_height: Type.Integer({
      examples: [840000],
      title: 'Block height',
      description: 'Bitcoin block height',
    }),
    tx_id: Type.String({
      examples: ['2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e'],
      title: 'Transaction ID',
      description: 'Bitcoin transaction ID',
    }),
    tx_index: Type.Integer({
      examples: [1],
      title: 'Transaction Index',
      description: 'Index of this transaction in its Bitcoin block',
    }),
    // vout/output are only present for operations tied to a specific output.
    vout: Optional(
      Type.Integer({
        examples: [100],
        title: 'Output number',
        description: 'Bitcoin transaction output number',
      })
    ),
    output: Optional(
      Type.String({
        examples: ['2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e:100'],
        title: 'Transaction output',
        description: 'Bitcoin transaction output',
      })
    ),
    timestamp: Type.Integer({
      examples: [1713571767],
      title: 'Timestamp',
      description: 'Bitcoin transaction timestamp',
    }),
  },
  {
    title: 'Transaction location',
    description: 'Location of the transaction which confirmed this operation',
  }
);
// Reusable field fragments for rune identity in responses.
const RuneIdResponseSchema = Type.String({
  title: 'ID',
  description: 'Rune ID',
  examples: ['840000:1'],
});
const RuneNameResponseSchema = Type.String({
  title: 'Name',
  description: 'Rune name',
  examples: ['ZZZZZFEHUZZZZZ'],
});
const RuneSpacedNameResponseSchema = Type.String({
  title: 'Spaced name',
  description: 'Rune name with spacers',
  examples: ['Z•Z•Z•Z•Z•FEHU•Z•Z•Z•Z•Z'],
});
const RuneNumberResponseSchema = Type.Integer({
  title: 'Number',
  description: 'Rune number',
  examples: [1],
});
// Full etching payload returned by the /etchings endpoints. Amount fields are
// strings because they are fixed-point integers formatted by divisibility.
export const EtchingResponseSchema = Type.Object({
  id: RuneIdResponseSchema,
  name: RuneNameResponseSchema,
  spaced_name: RuneSpacedNameResponseSchema,
  number: RuneNumberResponseSchema,
  divisibility: Type.Integer({
    title: 'Divisibility',
    description: 'Rune decimal places',
    examples: [2],
  }),
  symbol: Type.String({ title: 'Symbol', description: 'Rune symbol', examples: ['ᚠ'] }),
  turbo: Type.Boolean({ title: 'Turbo', description: 'Rune upgradeability', examples: [false] }),
  // Mint terms: all fields nullable since etchings may omit any of them.
  mint_terms: Type.Object(
    {
      amount: Nullable(
        Type.String({
          examples: ['100'],
          title: 'Mint amount',
          description: 'Amount awarded per mint',
        })
      ),
      cap: Nullable(
        Type.String({
          examples: ['1111111'],
          title: 'Mint cap',
          description: 'Maximum number of mints allowed',
        })
      ),
      height_start: Nullable(
        Type.Integer({
          examples: [840000],
          title: 'Mint block height start',
          description: 'Block height at which the mint period opens',
        })
      ),
      height_end: Nullable(
        Type.Integer({
          examples: [1050000],
          title: 'Mint block height end',
          description: 'Block height at which the mint period closes',
        })
      ),
      offset_start: Nullable(
        Type.Integer({
          examples: [0],
          title: 'Mint block height offset start',
          description: 'Block height etching offset at which the mint period opens',
        })
      ),
      offset_end: Nullable(
        Type.Integer({
          examples: [200],
          title: 'Mint block height offset end',
          description: 'Block height etching offset at which the mint period closes',
        })
      ),
    },
    { title: 'Mint terms', description: 'Rune mint terms' }
  ),
  // Supply counters, computed from the latest supply_changes row.
  supply: Type.Object(
    {
      current: Type.String({
        examples: ['11274916350'],
        title: 'Current supply',
        description: 'Circulating supply including mints, burns and premine',
      }),
      minted: Type.String({
        examples: ['274916100'],
        title: 'Minted amount',
        description: 'Total minted amount',
      }),
      total_mints: Type.String({
        examples: ['250'],
        title: 'Total mints',
        description: 'Number of mints for this rune',
      }),
      mint_percentage: Type.String({
        examples: ['59.4567'],
        title: 'Mint percentage',
        description: 'Percentage of mints that have been claimed',
      }),
      mintable: Type.Boolean({
        title: 'Mintable',
        description: 'Whether or not this rune is mintable at this time',
      }),
      burned: Type.String({
        examples: ['5100'],
        title: 'Burned amount',
        description: 'Total burned amount',
      }),
      total_burns: Type.String({
        examples: ['17'],
        title: 'Total burns',
        description: 'Number of burns for this rune',
      }),
      premine: Type.String({
        examples: ['11000000000'],
        title: 'Premine amount',
        description: 'Amount premined for this rune',
      }),
    },
    { title: 'Supply information', description: 'Rune supply information' }
  ),
  location: LocationDetailResponseSchema,
});
export type EtchingResponse = Static<typeof EtchingResponseSchema>;
// Rune identity fragment prefixed onto activity/balance responses that are not
// already scoped to a single rune.
const RuneDetailResponseSchema = Type.Object({
  rune: Type.Object(
    {
      id: RuneIdResponseSchema,
      number: RuneNumberResponseSchema,
      name: RuneNameResponseSchema,
      spaced_name: RuneSpacedNameResponseSchema,
    },
    { title: 'Rune detail', description: 'Details of the rune affected by this activity' }
  ),
});
// Activity payload for endpoints already scoped to one rune ("simple" = no
// rune identity fragment). Fixes the "relevat" typo in the OpenAPI-visible
// `amount` description.
export const SimpleActivityResponseSchema = Type.Object({
  address: Optional(
    Type.String({
      examples: ['bc1q7jd477wc5s88hsvenr0ddtatsw282hfjzg59wz'],
      title: 'Address',
      description: 'Bitcoin address which initiated this activity',
    })
  ),
  receiver_address: Optional(
    Type.String({
      examples: ['bc1pgdrveee2v4ez95szaakw5gkd8eennv2dddf9rjdrlt6ch56lzrrsxgvazt'],
      title: 'Receiver address',
      description: 'Bitcoin address which is receiving rune balance',
    })
  ),
  amount: Optional(
    Type.String({
      examples: ['11000000000'],
      title: 'Amount',
      description: 'Rune amount relevant to this activity',
    })
  ),
  operation: Type.Union(
    [
      Type.Literal('etching'),
      Type.Literal('mint'),
      Type.Literal('burn'),
      Type.Literal('send'),
      Type.Literal('receive'),
    ],
    { title: 'Operation', description: 'Type of operation described in this activity' }
  ),
  location: LocationDetailResponseSchema,
});
// Activity payload including the rune identity, for cross-rune endpoints.
export const ActivityResponseSchema = Type.Intersect([
  RuneDetailResponseSchema,
  SimpleActivityResponseSchema,
]);
export type ActivityResponse = Static<typeof ActivityResponseSchema>;
// Balance payload for endpoints already scoped to one rune.
export const SimpleBalanceResponseSchema = Type.Object({
  address: Optional(
    Type.String({
      examples: ['bc1q7jd477wc5s88hsvenr0ddtatsw282hfjzg59wz'],
      title: 'Address',
      description: 'Bitcoin address which holds this balance',
    })
  ),
  // Fixed-point integer string, formatted using the rune's divisibility.
  balance: Type.String({
    examples: ['11000000000'],
    title: 'Balance',
    description: 'Rune balance',
  }),
});
// Balance payload including the rune identity, for cross-rune endpoints.
export const BalanceResponseSchema = Type.Intersect([
  RuneDetailResponseSchema,
  SimpleBalanceResponseSchema,
]);
export type BalanceResponse = Static<typeof BalanceResponseSchema>;
// Generic 404 body shared by all lookup endpoints.
export const NotFoundResponse = Type.Object(
  {
    error: Type.Literal('Not found'),
  },
  { title: 'Not Found Response' }
);

View File

@@ -0,0 +1,14 @@
import { CACHE_CONTROL_MUST_REVALIDATE, parseIfNoneMatchHeader } from '@hirosystems/api-toolkit';
import { FastifyReply, FastifyRequest } from 'fastify';
// Fastify preHandler implementing ETag caching: the current chain tip hash is
// the ETag. Replies 304 when the client's If-None-Match already matches;
// otherwise tags the outgoing response so future requests can be validated.
export async function handleCache(request: FastifyRequest, reply: FastifyReply) {
  const etag = await request.server.db.getChainTipEtag();
  // No ledger rows yet: nothing to tag or validate against.
  if (!etag) return;
  const clientTags = parseIfNoneMatchHeader(request.headers['if-none-match']);
  if (clientTags?.includes(etag)) {
    await reply.header('Cache-Control', CACHE_CONTROL_MUST_REVALIDATE).code(304).send();
  } else {
    void reply.headers({ 'Cache-Control': CACHE_CONTROL_MUST_REVALIDATE, ETag: `"${etag}"` });
  }
}

View File

@@ -0,0 +1,102 @@
import BigNumber from 'bignumber.js';
import { DbBalance, DbItemWithRune, DbLedgerEntry, DbRuneWithChainTip } from '../../pg/types';
import { EtchingResponse, ActivityResponse, BalanceResponse } from '../schemas';
// Formats a fixed-point integer amount as a decimal string by shifting the
// value `decimals` places to the right, e.g. ('1150', 2) -> '11.50'.
function divisibility(num: string | BigNumber, decimals: number): string {
  const shifted = new BigNumber(num).shiftedBy(-decimals);
  return shifted.toFixed(decimals);
}
// Converts a DB rune row (joined with the current chain tip) into an API
// etching response. Integer amount columns are strings and are formatted via
// the rune's divisibility.
export function parseEtchingResponse(rune: DbRuneWithChainTip): EtchingResponse {
  let mintable = true;
  // Supply counters are NULL until the first supply_changes row exists.
  const minted = rune.minted == null ? '0' : rune.minted;
  const total_mints = rune.total_mints == null ? '0' : rune.total_mints;
  const burned = rune.burned == null ? '0' : rune.burned;
  const total_burns = rune.total_burns == null ? '0' : rune.total_burns;
  // Not mintable when: no mint terms, cenotaph, mint cap reached, or the chain
  // tip falls outside the absolute (height_*) or etching-relative (offset_*)
  // mint window.
  // NOTE(review): these guards use truthiness on string columns, so NULL (and
  // an empty string) disables a bound while a literal '0' keeps it — confirm
  // '0' bounds are intended to be enforced.
  if (
    rune.terms_amount == null ||
    rune.cenotaph ||
    (rune.terms_cap && BigNumber(total_mints).gte(rune.terms_cap)) ||
    (rune.terms_height_start && BigNumber(rune.chain_tip).lt(rune.terms_height_start)) ||
    (rune.terms_height_end && BigNumber(rune.chain_tip).gt(rune.terms_height_end)) ||
    (rune.terms_offset_start &&
      BigNumber(rune.chain_tip).lt(BigNumber(rune.block_height).plus(rune.terms_offset_start))) ||
    (rune.terms_offset_end &&
      BigNumber(rune.chain_tip).gt(BigNumber(rune.block_height).plus(rune.terms_offset_end)))
  ) {
    mintable = false;
  }
  return {
    id: rune.id,
    number: rune.number,
    name: rune.name,
    spaced_name: rune.spaced_name,
    divisibility: rune.divisibility,
    symbol: rune.symbol,
    mint_terms: {
      amount: rune.terms_amount ? divisibility(rune.terms_amount, rune.divisibility) : null,
      cap: rune.terms_cap ? divisibility(rune.terms_cap, rune.divisibility) : null,
      height_start: rune.terms_height_start ? parseInt(rune.terms_height_start) : null,
      height_end: rune.terms_height_end ? parseInt(rune.terms_height_end) : null,
      offset_start: rune.terms_offset_start ? parseInt(rune.terms_offset_start) : null,
      offset_end: rune.terms_offset_end ? parseInt(rune.terms_offset_end) : null,
    },
    supply: {
      premine: divisibility(rune.premine, rune.divisibility),
      // NOTE(review): current supply adds the burned amount (matches the
      // schema's "including ... burns" wording) — verify this is intended
      // rather than subtracting burns.
      current: divisibility(BigNumber(minted).plus(burned).plus(rune.premine), rune.divisibility),
      minted: divisibility(minted, rune.divisibility),
      total_mints,
      burned: divisibility(burned, rune.divisibility),
      total_burns,
      // Percentage of the mint cap consumed, '0.0000' when cap is absent/zero.
      mint_percentage:
        rune.terms_cap != null && rune.terms_cap != '0'
          ? BigNumber(total_mints).div(rune.terms_cap).times(100).toFixed(4)
          : '0.0000',
      mintable,
    },
    turbo: rune.turbo,
    location: {
      block_hash: rune.block_hash,
      block_height: parseInt(rune.block_height),
      tx_index: rune.tx_index,
      tx_id: rune.tx_id,
      timestamp: rune.timestamp,
    },
  };
}
// Converts a ledger DB row into an API activity response. Nullable DB columns
// become `undefined` so TypeBox `Optional` fields are omitted from the JSON.
export function parseActivityResponse(entry: DbItemWithRune<DbLedgerEntry>): ActivityResponse {
  return {
    rune: {
      id: entry.rune_id,
      number: entry.number,
      name: entry.name,
      spaced_name: entry.spaced_name,
    },
    operation: entry.operation,
    address: entry.address ?? undefined,
    receiver_address: entry.receiver_address ?? undefined,
    amount: entry.amount ? divisibility(entry.amount, entry.divisibility) : undefined,
    location: {
      block_hash: entry.block_hash,
      block_height: parseInt(entry.block_height),
      tx_index: entry.tx_index,
      tx_id: entry.tx_id,
      // `output` is typed number | null and can legitimately be 0 (the first
      // tx output). Use null checks for both fields: the previous truthiness
      // check on `output` dropped the `txid:vout` string whenever vout was 0,
      // while `vout` itself (using `??`) was kept — an inconsistent response.
      vout: entry.output ?? undefined,
      output: entry.output != null ? `${entry.tx_id}:${entry.output}` : undefined,
      timestamp: entry.timestamp,
    },
  };
}
// Converts a DB balance row into an API balance response, formatting the raw
// fixed-point balance with the rune's divisibility.
export function parseBalanceResponse(item: DbItemWithRune<DbBalance>): BalanceResponse {
  const rune = {
    id: item.rune_id,
    number: item.number,
    name: item.name,
    spaced_name: item.spaced_name,
  };
  return {
    rune,
    address: item.address,
    balance: divisibility(item.balance, item.divisibility),
  };
}

29
api/runes/src/env.ts Normal file
View File

@@ -0,0 +1,29 @@
import { Static, Type } from '@sinclair/typebox';
import envSchema from 'env-schema';
const schema = Type.Object({
  /** Hostname of the API server */
  API_HOST: Type.String({ default: '0.0.0.0' }),
  /** Port in which to serve the API */
  API_PORT: Type.Number({ default: 3000, minimum: 0, maximum: 65535 }),
  /** Port in which to serve the Admin RPC interface */
  ADMIN_RPC_PORT: Type.Number({ default: 3001, minimum: 0, maximum: 65535 }),
  /** Hostname of the Runes Postgres database */
  RUNES_PGHOST: Type.String(),
  /** Port of the Runes Postgres database */
  RUNES_PGPORT: Type.Number({ default: 5432, minimum: 0, maximum: 65535 }),
  /** Postgres user name */
  RUNES_PGUSER: Type.String(),
  /** Postgres user password */
  RUNES_PGPASSWORD: Type.String(),
  /** Postgres database name */
  RUNES_PGDATABASE: Type.String(),
  /** Limit to how many concurrent connections can be created */
  PG_CONNECTION_POOL_MAX: Type.Number({ default: 10 }),
  // Idle timeout and max lifetime — presumably seconds per the postgres.js
  // convention used by api-toolkit; TODO confirm units.
  PG_IDLE_TIMEOUT: Type.Number({ default: 30 }),
  PG_MAX_LIFETIME: Type.Number({ default: 60 }),
  /** Statement timeout, in milliseconds */
  PG_STATEMENT_TIMEOUT: Type.Number({ default: 60_000 }),
});
type Env = Static<typeof schema>;
// Validated environment, loaded once at module import (also reads .env).
export const ENV = envSchema<Env>({
  schema: schema,
  dotenv: true,
});

55
api/runes/src/index.ts Normal file
View File

@@ -0,0 +1,55 @@
import { isProdEnv, logger, registerShutdownConfig } from '@hirosystems/api-toolkit';
import { buildApiServer, buildPrometheusServer } from './api/init';
import { ENV } from './env';
import { PgStore } from './pg/pg-store';
import { ApiMetrics } from './metrics/metrics';
// Boots the Fastify API server and, in production, a Prometheus metrics
// server. Each server registers its own shutdown handler so it closes cleanly
// on process exit.
async function initApiService(db: PgStore) {
  logger.info('Initializing API service...');
  const fastify = await buildApiServer({ db });
  registerShutdownConfig({
    name: 'API Server',
    forceKillable: false,
    handler: async () => {
      await fastify.close();
    },
  });
  await fastify.listen({ host: ENV.API_HOST, port: ENV.API_PORT });
  if (isProdEnv) {
    const promServer = await buildPrometheusServer({ metrics: fastify.metrics });
    registerShutdownConfig({
      name: 'Prometheus Server',
      forceKillable: false,
      handler: async () => {
        await promServer.close();
      },
    });
    ApiMetrics.configure(db);
    // NOTE(review): metrics port 9153 is hard-coded — consider an env var.
    await promServer.listen({ host: ENV.API_HOST, port: 9153 });
  }
}
// Application entry point: connect to Postgres, then start the API service.
async function initApp() {
  const db = await PgStore.connect();
  await initApiService(db);
  // NOTE(review): the DB shutdown handler is registered only after the API
  // service starts; if initApiService throws, this connection is never
  // registered for cleanup — verify the process exit path covers it.
  registerShutdownConfig({
    name: 'DB',
    forceKillable: false,
    handler: async () => {
      await db.close();
    },
  });
}
// Install the default signal/shutdown handling, then start the app; exit
// non-zero if startup fails.
registerShutdownConfig();
initApp()
  .then(() => {
    logger.info('App initialized');
  })
  .catch(error => {
    logger.error(error, `App failed to start`);
    process.exit(1);
  });

View File

@@ -0,0 +1,22 @@
import * as prom from 'prom-client';
import { PgStore } from '../pg/pg-store';
// Prometheus metrics exposed by the API process. Construct via `configure`.
export class ApiMetrics {
  /** The most recent Bitcoin block height ingested by the API */
  readonly runes_api_block_height: prom.Gauge;
  static configure(db: PgStore): ApiMetrics {
    return new ApiMetrics(db);
  }
  private constructor(db: PgStore) {
    this.runes_api_block_height = new prom.Gauge({
      name: `runes_api_block_height`,
      help: 'The most recent Bitcoin block height ingested by the API',
      // Queried lazily on each scrape; inside `collect`, `this` is the Gauge.
      // Defaults to 0 when no ledger rows exist yet.
      async collect() {
        const height = await db.getChainTipBlockHeight();
        this.set(parseInt(height ?? '0'));
      },
    });
  }
}

View File

@@ -0,0 +1,281 @@
import {
BasePgStore,
PgConnectionVars,
PgSqlClient,
PgSqlQuery,
connectPostgres,
} from '@hirosystems/api-toolkit';
import { ENV } from '../env';
import {
DbBalance,
DbCountedQueryResult,
DbItemWithRune,
DbLedgerEntry,
DbPaginatedResult,
DbRuneWithChainTip,
} from './types';
import {
Address,
BlockHeightCType,
Block,
Rune,
Limit,
Offset,
RuneNameSchemaCType,
RuneSpacedNameSchemaCType,
TransactionId,
RuneNumberSchemaCType,
} from '../api/schemas';
// Builds a SQL predicate matching a rune by whichever identifier format the
// `etching` string satisfies: name, spaced name, number, or (by default) the
// `<block>:<tx>` rune ID. `prefix` optionally qualifies the column name.
function runeFilter(sql: PgSqlClient, etching: string, prefix?: string): PgSqlQuery {
  const column = (name: string) => sql(prefix ? `${prefix}.${name}` : name);
  if (RuneNameSchemaCType.Check(etching)) return sql`${column('name')} = ${etching}`;
  if (RuneSpacedNameSchemaCType.Check(etching)) return sql`${column('spaced_name')} = ${etching}`;
  if (RuneNumberSchemaCType.Check(etching)) return sql`${column('number')} = ${etching}`;
  // Fallback: treat the argument as a rune ID.
  return sql`${column('id')} = ${etching}`;
}
// Builds a SQL predicate matching a block by height (all-digit string) or
// block hash. `prefix` optionally qualifies the column name.
function blockFilter(sql: PgSqlClient, block: string, prefix?: string): PgSqlQuery {
  const p = prefix ? `${prefix}.` : '';
  const column = BlockHeightCType.Check(block) ? `${p}block_height` : `${p}block_hash`;
  return sql`${sql(column)} = ${block}`;
}
/**
 * Postgres read layer for the Runes API. All queries read the tables written
 * by the indexer: `runes`, `ledger`, `supply_changes` and `balance_changes`.
 *
 * Fix: `getRuneHolders` was missing a comma between `r.number` and
 * `r.spaced_name` in its select list, which is invalid SQL (compare the
 * correct list in `getAddressBalances`).
 */
export class PgStore extends BasePgStore {
  /** Connects using the RUNES_PG* environment variables and returns a store. */
  static async connect(): Promise<PgStore> {
    const pgConfig: PgConnectionVars = {
      host: ENV.RUNES_PGHOST,
      port: ENV.RUNES_PGPORT,
      user: ENV.RUNES_PGUSER,
      password: ENV.RUNES_PGPASSWORD,
      database: ENV.RUNES_PGDATABASE,
    };
    const sql = await connectPostgres({
      usageName: 'runes-api-pg-store',
      connectionArgs: pgConfig,
      connectionConfig: {
        poolMax: ENV.PG_CONNECTION_POOL_MAX,
        idleTimeout: ENV.PG_IDLE_TIMEOUT,
        maxLifetime: ENV.PG_MAX_LIFETIME,
        statementTimeout: ENV.PG_STATEMENT_TIMEOUT,
      },
    });
    return new PgStore(sql);
  }

  constructor(sql: PgSqlClient) {
    super(sql);
  }

  /** Block hash of the most recent ledger entry; used as the HTTP ETag. */
  async getChainTipEtag(): Promise<string | undefined> {
    const result = await this.sql<{ etag: string }[]>`
      SELECT block_hash AS etag FROM ledger ORDER BY block_height DESC LIMIT 1
    `;
    return result[0]?.etag;
  }

  /** Highest block height present in the ledger, as a string. */
  async getChainTipBlockHeight(): Promise<string | undefined> {
    const result = await this.sql<{ block_height: string }[]>`
      SELECT block_height FROM ledger ORDER BY block_height DESC LIMIT 1
    `;
    return result[0]?.block_height;
  }

  /**
   * Shared etching query: newest runes first, joined with each rune's latest
   * supply counters plus the chain tip. `total` is the overall rune count
   * (max rune number + 1), not the filtered count.
   */
  private async getEtchings(
    id?: Rune,
    offset: Offset = 0,
    limit: Limit = 1
  ): Promise<DbPaginatedResult<DbRuneWithChainTip>> {
    const results = await this.sql<DbCountedQueryResult<DbRuneWithChainTip>[]>`
      WITH
      rune_count AS (SELECT COALESCE(MAX(number), 0) + 1 AS total FROM runes),
      max AS (SELECT MAX(block_height) AS chain_tip FROM ledger),
      results AS (
        SELECT *
        FROM runes
        ${id ? this.sql`WHERE ${runeFilter(this.sql, id)}` : this.sql``}
        ORDER BY block_height DESC, tx_index DESC
        OFFSET ${offset} LIMIT ${limit}
      ),
      recent_supplies AS (
        SELECT DISTINCT ON (rune_id) *
        FROM supply_changes
        WHERE rune_id IN (SELECT id FROM results)
        ORDER BY rune_id, block_height DESC
      )
      SELECT r.*, s.minted, s.total_mints, s.burned, s.total_burns,
        (SELECT total FROM rune_count), (SELECT chain_tip FROM max)
      FROM results AS r
      INNER JOIN recent_supplies AS s ON r.id = s.rune_id
      ORDER BY r.block_height DESC, r.tx_index DESC
    `;
    return {
      total: results[0]?.total ?? 0,
      results,
    };
  }

  /** Single etching lookup by any accepted rune identifier format. */
  async getRuneEtching(id: Rune): Promise<DbRuneWithChainTip | undefined> {
    const result = await this.getEtchings(id);
    if (result.total == 0) return undefined;
    return result.results[0];
  }

  /** Paginated list of all etchings, newest first. */
  async getRuneEtchings(
    offset: Offset,
    limit: Limit
  ): Promise<DbPaginatedResult<DbRuneWithChainTip>> {
    return this.getEtchings(undefined, offset, limit);
  }

  /**
   * Shared ledger activity query. `filter` selects the rows, `count` supplies
   * the total expression, and `cte` optionally prepends CTEs used by `count`.
   */
  private async getActivity(
    filter: PgSqlQuery,
    count: PgSqlQuery,
    offset: Offset,
    limit: Limit,
    cte?: PgSqlQuery
  ): Promise<DbPaginatedResult<DbItemWithRune<DbLedgerEntry>>> {
    const results = await this.sql<DbCountedQueryResult<DbItemWithRune<DbLedgerEntry>>[]>`
      WITH ${cte ? cte : this.sql`none AS (SELECT NULL)`},
      results AS (
        SELECT l.*, r.name, r.number, r.spaced_name, r.divisibility, ${count} AS total
        FROM ledger AS l
        INNER JOIN runes AS r ON r.id = l.rune_id
        WHERE ${filter}
      )
      SELECT * FROM results
      ORDER BY block_height DESC, tx_index DESC, event_index DESC
      OFFSET ${offset} LIMIT ${limit}
    `;
    return {
      total: results[0]?.total ?? 0,
      results,
    };
  }

  /** All ledger activity for one rune; total comes from supply_changes. */
  async getRuneActivity(runeId: Rune, offset: Offset, limit: Limit) {
    return this.getActivity(
      runeFilter(this.sql, runeId, 'r'),
      this.sql`COALESCE((SELECT total_operations FROM count), 0)`,
      offset,
      limit,
      this.sql`count AS (
        SELECT total_operations FROM supply_changes
        WHERE rune_id = (SELECT id FROM runes WHERE ${runeFilter(this.sql, runeId)})
        ORDER BY block_height DESC LIMIT 1
      )`
    );
  }

  /** Ledger activity for one rune restricted to a single address. */
  async getRuneAddressActivity(runeId: Rune, address: Address, offset: Offset, limit: Limit) {
    return this.getActivity(
      this.sql`${runeFilter(this.sql, runeId, 'r')} AND address = ${address}`,
      this.sql`COUNT(*) OVER()`,
      offset,
      limit
    );
  }

  /** All ledger activity for an address across every rune it has touched. */
  async getAddressActivity(address: Address, offset: Offset, limit: Limit) {
    return this.getActivity(
      this.sql`address = ${address}`,
      this.sql`COALESCE((SELECT total_operations FROM count), 0)`,
      offset,
      limit,
      this.sql`recent AS (
        SELECT DISTINCT ON (rune_id) total_operations
        FROM balance_changes
        WHERE address = ${address}
        ORDER BY rune_id, block_height DESC
      ),
      count AS (
        SELECT SUM(total_operations) AS total_operations FROM recent
      )`
    );
  }

  /** All rune activity produced by a single Bitcoin transaction. */
  async getTransactionActivity(txId: TransactionId, offset: Offset, limit: Limit) {
    return this.getActivity(this.sql`l.tx_id = ${txId}`, this.sql`COUNT(*) OVER()`, offset, limit);
  }

  /** All rune activity within one block (by height or hash). */
  async getBlockActivity(block: Block, offset: Offset, limit: Limit) {
    return this.getActivity(
      blockFilter(this.sql, block, 'l'),
      this.sql`COUNT(*) OVER()`,
      offset,
      limit
    );
  }

  /**
   * Latest balance per address for one rune, ordered by balance descending.
   * Uses DISTINCT ON to keep only each address' most recent balance row.
   */
  async getRuneHolders(
    id: Rune,
    offset: Offset,
    limit: Limit
  ): Promise<DbPaginatedResult<DbItemWithRune<DbBalance>>> {
    const results = await this.sql<DbCountedQueryResult<DbItemWithRune<DbBalance>>[]>`
      WITH grouped AS (
        SELECT DISTINCT ON (b.address) b.address, b.balance, b.total_operations, b.rune_id, r.name, r.number,
          r.spaced_name, r.divisibility, COUNT(*) OVER() AS total
        FROM balance_changes AS b
        INNER JOIN runes AS r ON r.id = b.rune_id
        WHERE ${runeFilter(this.sql, id, 'r')}
        ORDER BY b.address, b.block_height DESC
      )
      SELECT * FROM grouped
      ORDER BY balance DESC
      OFFSET ${offset} LIMIT ${limit}
    `;
    return {
      total: results[0]?.total ?? 0,
      results,
    };
  }

  /** Latest balance of one address for one rune, or undefined if none. */
  async getRuneAddressBalance(
    id: Rune,
    address: Address
  ): Promise<DbItemWithRune<DbBalance> | undefined> {
    const results = await this.sql<DbItemWithRune<DbBalance>[]>`
      SELECT b.rune_id, b.address, b.balance, b.total_operations, r.name,
        r.number, r.spaced_name, r.divisibility, COUNT(*) OVER() AS total
      FROM balance_changes AS b
      INNER JOIN runes AS r ON r.id = b.rune_id
      WHERE ${runeFilter(this.sql, id, 'r')} AND address = ${address}
      ORDER BY b.block_height DESC
      LIMIT 1
    `;
    return results[0];
  }

  /** Latest balance per rune for one address, ordered by balance descending. */
  async getAddressBalances(
    address: Address,
    offset: Offset,
    limit: Limit
  ): Promise<DbPaginatedResult<DbItemWithRune<DbBalance>>> {
    const results = await this.sql<DbCountedQueryResult<DbItemWithRune<DbBalance>>[]>`
      WITH grouped AS (
        SELECT DISTINCT ON (b.rune_id) b.address, b.balance, b.total_operations, b.rune_id, r.name,
          r.number, r.spaced_name, r.divisibility, COUNT(*) OVER() AS total
        FROM balance_changes AS b
        INNER JOIN runes AS r ON r.id = b.rune_id
        WHERE address = ${address}
        ORDER BY b.rune_id, b.block_height DESC
      )
      SELECT * FROM grouped
      ORDER BY balance DESC
      OFFSET ${offset} LIMIT ${limit}
    `;
    return {
      total: results[0]?.total ?? 0,
      results,
    };
  }
}

67
api/runes/src/pg/types.ts Normal file
View File

@@ -0,0 +1,67 @@
/** A page of query results together with the overall row count. */
export type DbPaginatedResult<T> = {
  total: number;
  results: T[];
};
/** Row shape when the SQL also selects a `total` count column. */
export type DbCountedQueryResult<T> = T & { total: number };
// A `runes` table row joined with its latest supply counters. Large integer
// columns (heights, amounts) arrive from Postgres as strings.
export type DbRune = {
  id: string;
  number: number;
  name: string;
  spaced_name: string;
  block_hash: string;
  block_height: string;
  tx_index: number;
  tx_id: string;
  divisibility: number;
  premine: string;
  symbol: string;
  cenotaph: boolean;
  // Mint terms: NULL when the etching did not declare the field.
  terms_amount: string | null;
  terms_cap: string | null;
  terms_height_start: string | null;
  terms_height_end: string | null;
  terms_offset_start: string | null;
  terms_offset_end: string | null;
  turbo: boolean;
  // Supply counters: NULL until the first supply_changes row exists.
  minted: string | null;
  total_mints: string | null;
  burned: string | null;
  total_burns: string | null;
  total_operations: string | null;
  timestamp: number;
};
/** DbRune plus the highest ledger block height at query time. */
export type DbRuneWithChainTip = DbRune & { chain_tip: string };
type DbLedgerOperation = 'etching' | 'mint' | 'burn' | 'send' | 'receive';
// A `ledger` table row describing one rune operation.
export type DbLedgerEntry = {
  rune_id: string;
  block_hash: string;
  block_height: string;
  tx_index: number;
  tx_id: string;
  // Transaction output index (vout); NULL when not tied to an output.
  output: number | null;
  address: string | null;
  receiver_address: string | null;
  amount: string | null;
  operation: DbLedgerOperation;
  timestamp: number;
};
/** Adds the joined rune identity columns to a row type. */
export type DbItemWithRune<T> = T & {
  name: string;
  number: number;
  spaced_name: string;
  divisibility: number;
  total_operations: number;
};
// A `balance_changes` row: latest known balance for (rune, address).
export type DbBalance = {
  rune_id: string;
  address: string;
  balance: string;
  total_operations: number;
};

View File

@@ -0,0 +1,150 @@
import { ENV } from '../../src/env';
import { PgStore } from '../../src/pg/pg-store';
import {
insertDbLedgerEntry,
insertRune,
sampleRune,
runMigrations,
startTestApiServer,
TestFastifyServer,
insertSupplyChange,
sampleLedgerEntry,
clearDb,
} from '../helpers';
describe('Endpoints', () => {
  let db: PgStore;
  let fastify: TestFastifyServer;
  // Shared fixtures: one etched rune plus its single 'etching' ledger entry.
  const rune = sampleRune('1:1', 'Sample Rune');
  const ledgerEntry = sampleLedgerEntry(rune.id);

  // Fresh DB schema + API server per test; fixtures re-inserted every time.
  beforeEach(async () => {
    db = await PgStore.connect();
    fastify = await startTestApiServer(db);
    await runMigrations(db.sql);
    await insertRune(db, rune);
    const event_index = 0;
    await insertDbLedgerEntry(db, ledgerEntry, event_index);
    await insertSupplyChange(db, rune.id, 1);
  });

  afterEach(async () => {
    if (fastify) {
      await fastify.close();
    }
    await clearDb(db.sql);
    await db.close();
  });

  describe('Etchings', () => {
    test('lists runes', async () => {
      // NOTE(review): divisibility/premine/supply show DB column defaults (not the sample
      // rune's values) because `insertRune` only writes a subset of columns — confirm intended.
      const expected = {
        divisibility: 0,
        id: '1:1',
        location: {
          block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5',
          block_height: 840000,
          timestamp: 0,
          tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e',
          tx_index: 1,
        },
        mint_terms: {
          amount: '100',
          cap: '5000000',
          height_end: null,
          height_start: null,
          offset_end: null,
          offset_start: null,
        },
        name: 'Sample Rune',
        number: 1,
        spaced_name: 'Sample•Rune',
        supply: {
          burned: '0',
          current: '0',
          mint_percentage: '0.0000',
          mintable: false,
          minted: '0',
          premine: '0',
          total_burns: '0',
          total_mints: '0',
        },
        symbol: 'ᚠ',
        turbo: false,
      };
      // The list endpoint should include at least the fixture rune.
      const runesResponse = await fastify.inject({
        method: 'GET',
        url: '/runes/v1/etchings',
      });
      expect(runesResponse.statusCode).toBe(200);
      expect(runesResponse.json().results).not.toHaveLength(0);
      // Fetch by rune id and compare the full payload.
      const response = await fastify.inject({
        method: 'GET',
        url: '/runes/v1/etchings/' + ledgerEntry.rune_id,
      });
      expect(response.statusCode).toBe(200);
      expect(response.json()).toStrictEqual(expected);
    });

    test('can fetch by spaced name', async () => {
      const url = '/runes/v1/etchings/' + rune.spaced_name;
      const response = await fastify.inject({
        method: 'GET',
        url: url,
      });
      expect(response.statusCode).toBe(200);
      expect(response.json().spaced_name).toEqual(rune.spaced_name);
    });

    test('can not fetch by spaced name if lacking bullets', async () => {
      // Dashes instead of '•' must be rejected by the etching name param schema.
      const url = '/runes/v1/etchings/' + rune.spaced_name.replaceAll('•', '-');
      const response = await fastify.inject({
        method: 'GET',
        url: url,
      });
      expect(response.statusCode).toBe(400);
    });
  });

  describe('Transactions', () => {
    test('shows details', async () => {
      const expected = {
        limit: 20,
        offset: 0,
        results: [
          {
            address: '0',
            amount: '0',
            location: {
              block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5',
              block_height: 840000,
              output: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e:0',
              timestamp: 0,
              tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e',
              tx_index: 0,
              vout: 0,
            },
            operation: 'etching',
            receiver_address: '0',
            rune: {
              id: '1:1',
              name: 'Sample Rune',
              number: 1,
              spaced_name: 'Sample•Rune',
            },
          },
        ],
        total: 1,
      };
      const txid = ledgerEntry.tx_id;
      const response = await fastify.inject({
        method: 'GET',
        url: '/runes/v1/transactions/' + txid + '/activity',
      });
      expect(response.statusCode).toBe(200);
      expect(response.json()).toStrictEqual(expected);
    });
  });
});

208
api/runes/tests/helpers.ts Normal file
View File

@@ -0,0 +1,208 @@
import { readdirSync } from 'fs';
import { PgStore } from '../src/pg/pg-store';
import { FastifyBaseLogger, FastifyInstance } from 'fastify';
import { IncomingMessage, Server, ServerResponse } from 'http';
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { buildApiServer } from '../src/api/init';
import * as fs from 'fs';
import * as path from 'path';
import { DbLedgerEntry, DbRune } from '../src/pg/types';
import { PgSqlClient } from '@hirosystems/api-toolkit';
/** Fastify instance type used across tests, with the TypeBox type provider applied. */
export type TestFastifyServer = FastifyInstance<
  Server,
  IncomingMessage,
  ServerResponse,
  FastifyBaseLogger,
  TypeBoxTypeProvider
>;
/** Builds a Fastify API server instance backed by the given PgStore, for test use. */
export async function startTestApiServer(db: PgStore): Promise<TestFastifyServer> {
  const server = await buildApiServer({ db });
  return server;
}
// Location of the Rust indexer's refinery-style migration files, relative to the test cwd.
const RUNES_MIGRATIONS_DIR = '../../migrations/runes';

/// Runs SQL migrations based on the Rust `refinery` crate standard.
/// Returns the ordered list of migration file paths that were applied.
export async function runMigrations(sql: PgSqlClient) {
  const files = fs.readdirSync(RUNES_MIGRATIONS_DIR);
  const sqlFiles = files
    .filter(file => path.extname(file).toLowerCase() === '.sql')
    .map(file => path.join(RUNES_MIGRATIONS_DIR, file))
    .sort((a, b) => {
      // Sort by the migration version number (e.g. `V3__ledger.sql` -> 3). Match on the
      // basename only: matching the full path would pick up digits from directory names.
      const numA = parseInt(path.basename(a).match(/\d+/)?.toString() || '0', 10);
      const numB = parseInt(path.basename(b).match(/\d+/)?.toString() || '0', 10);
      return numA - numB;
    });
  for (const sqlFile of sqlFiles) await sql.file(sqlFile);
  return sqlFiles;
}
/// Drops all tables and types from a test DB. Equivalent to a migration rollback, which are
/// unsupported by the `refinery` crate.
export async function clearDb(sql: PgSqlClient) {
  // Drop every table in the current schema.
  await sql`
    DO $$ DECLARE
      r RECORD;
    BEGIN
      FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
        EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
      END LOOP;
    END $$;
  `;
  // Drop every enum type in the current schema (e.g. `ledger_operation`), since those
  // survive a table drop and would break a subsequent `CREATE TYPE` on re-migration.
  await sql`
    DO $$ DECLARE
      r RECORD;
    BEGIN
      FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP
        EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
      END LOOP;
    END $$;
  `;
}
/**
 * Builds a sample 'etching' ledger entry fixture.
 * @param rune_id - Rune id for the entry (was previously ignored and hardcoded to '1:1')
 * @param block_height - Optional block height override; defaults to '840000'
 */
export function sampleLedgerEntry(rune_id: string, block_height?: string): DbLedgerEntry {
  return {
    // Bug fix: use the parameter instead of a hardcoded '1:1'.
    rune_id,
    block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5',
    block_height: block_height || '840000',
    tx_index: 0,
    tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e',
    output: 0,
    address: '0',
    receiver_address: '0',
    amount: '0',
    operation: 'etching',
    timestamp: 0,
  };
}
function toSpacedName(name: string | null): string | null {
if (name === null) {
return null;
}
// should take "Some name" and make it "Some•name"
const words = name.split(' ');
return words.join('•');
}
/**
 * Builds a sample rune fixture.
 * @param id - Rune id (was previously ignored and hardcoded to '1:1')
 * @param name - Optional display name; its spaced form is derived by replacing spaces with '•'
 */
export function sampleRune(id: string, name?: string): DbRune {
  return {
    // Bug fix: use the parameter instead of a hardcoded '1:1'.
    id,
    name: name || 'SAMPLERUNENAME',
    spaced_name: (name && toSpacedName(name)) || 'SAMPLE•RUNE•NAME',
    number: 1,
    block_hash: '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5',
    block_height: '840000',
    tx_index: 1,
    tx_id: '2bb85f4b004be6da54f766c17c1e855187327112c231ef2ff35ebad0ea67c69e',
    divisibility: 2,
    premine: '1000',
    symbol: 'ᚠ',
    cenotaph: true,
    terms_amount: '100',
    terms_cap: '5000000',
    terms_height_start: null,
    terms_height_end: null,
    terms_offset_start: null,
    terms_offset_end: null,
    turbo: false,
    minted: '1000',
    total_mints: '1500',
    burned: '500',
    total_burns: '750',
    total_operations: '1',
    timestamp: 1713571767,
  };
}
/**
 * Inserts a ledger entry fixture row.
 * @param db - Test PgStore connection
 * @param payload - Ledger entry values to insert
 * @param event_index - Event index within the tx (not part of DbLedgerEntry)
 */
export async function insertDbLedgerEntry(
  db: PgStore,
  payload: DbLedgerEntry,
  event_index: number
): Promise<void> {
  await db.sqlWriteTransaction(async sql => {
    const {
      rune_id,
      block_hash,
      block_height,
      tx_index,
      tx_id,
      output,
      address,
      receiver_address,
      amount,
      operation,
      timestamp,
    } = payload;
    // Bug fix: persist `payload.timestamp` instead of a hardcoded 0. Existing fixtures
    // (`sampleLedgerEntry`) use timestamp 0, so current test expectations are unchanged.
    await sql`
      INSERT INTO ledger (
        rune_id, block_hash, block_height, tx_index, tx_id, output,
        address, receiver_address, amount, operation, timestamp, event_index
      )
      VALUES (
        ${rune_id}, ${block_hash}, ${block_height}, ${tx_index}, ${tx_id}, ${output},
        ${address}, ${receiver_address}, ${amount}, ${operation}, ${timestamp}, ${event_index}
      )
    `;
  });
}
/**
 * Inserts a supply_changes fixture row for a rune at a given block height.
 * Burn counters are always written as zero for test fixtures.
 */
export async function insertSupplyChange(
  db: PgStore,
  rune_id: string,
  block_height: number,
  minted?: number,
  total_mints?: number,
  total_operations?: number
): Promise<void> {
  const mintedValue = minted || 0;
  const totalMintsValue = total_mints || 0;
  const totalOpsValue = total_operations || 0;
  await db.sqlWriteTransaction(async sql => {
    await sql`
      INSERT INTO supply_changes (
        rune_id, block_height, minted, total_mints, burned, total_burns, total_operations
      )
      VALUES (
        ${rune_id}, ${block_height}, ${mintedValue}, ${totalMintsValue}, ${0}, ${0}, ${totalOpsValue}
      )
    `;
  });
}
/**
 * Inserts a rune fixture row. Only a subset of DbRune columns is written; the rest
 * (divisibility, premine, turbo, ...) take their DB column defaults.
 */
export async function insertRune(db: PgStore, payload: DbRune): Promise<void> {
  await db.sqlWriteTransaction(async sql => {
    const {
      id,
      name,
      spaced_name,
      number,
      block_hash,
      block_height,
      tx_index,
      tx_id,
      symbol,
      cenotaph,
      terms_amount,
      terms_cap,
      terms_height_start,
      terms_height_end,
    } = payload;
    // Bug fix: pass terms values through directly. The previous `terms_amount || ''`
    // fallback would send an empty string for null terms, which is invalid input for the
    // nullable NUMERIC columns; a JS null is bound as SQL NULL, which those columns accept.
    await sql`
      INSERT INTO runes (
        id, number, name, spaced_name, block_hash, block_height, tx_index, tx_id, symbol, cenotaph,
        terms_amount, terms_cap, terms_height_start, terms_height_end, timestamp
      )
      VALUES (
        ${id}, ${number}, ${name}, ${spaced_name}, ${block_hash}, ${block_height}, ${tx_index},
        ${tx_id}, ${symbol}, ${cenotaph}, ${terms_amount}, ${terms_cap}, ${terms_height_start},
        ${terms_height_end}, 0
      )
    `;
  });
}

11
api/runes/tests/setup.ts Normal file
View File

@@ -0,0 +1,11 @@
// ts-unused-exports:disable-next-line
export default (): void => {
  // Default API + Postgres env values for the test runner.
  const testEnv: Record<string, string> = {
    API_HOST: '0.0.0.0',
    API_PORT: '3000',
    RUNES_PGHOST: '127.0.0.1',
    RUNES_PGPORT: '5432',
    RUNES_PGUSER: 'postgres',
    RUNES_PGPASSWORD: 'postgres',
    RUNES_PGDATABASE: 'postgres',
    RUNES_SCHEMA: 'public',
  };
  Object.assign(process.env, testEnv);
};

View File

@@ -0,0 +1,6 @@
{
"extends": "./tsconfig.json",
"exclude": [
"tests/**/*.ts",
]
}

113
api/runes/tsconfig.json Normal file
View File

@@ -0,0 +1,113 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Projects */
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
"lib": [
"es2021"
] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "commonjs" /* Specify what module code is generated. */,
// "rootDir": "./", /* Specify the root folder within your source files. */
"moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */,
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
"typeRoots": [
"./src/@types",
"./node_modules/@types"
] /* Specify multiple folders that act like './node_modules/@types'. */,
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
"sourceMap": true /* Create source map files for emitted JavaScript files. */,
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
// "removeComments": true, /* Disable emitting comments. */
// "noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
"allowSyntheticDefaultImports": false /* Allow 'import x from y' when a module doesn't have a default export. */,
"esModuleInterop": false /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
/* Type Checking */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
},
"include": [
"./src/**/*.ts",
"./tests/**/*.ts",
"./util/**/*.ts"
],
}

View File

@@ -0,0 +1,36 @@
import Fastify, { FastifyPluginAsync } from 'fastify';
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Api } from '../src/api/init';
import FastifySwagger from '@fastify/swagger';
import { existsSync, mkdirSync, writeFileSync } from 'fs';
import { Server } from 'http';
import { OpenApiSchemaOptions } from '../src/api/schemas';
/**
* Generates `openapi.yaml` based on current Swagger definitions.
*/
/**
 * Fastify plugin that registers the Runes API behind Swagger and, once the server is
 * ready, dumps the generated OpenAPI spec to `./tmp` in both YAML and JSON form.
 */
export const ApiGenerator: FastifyPluginAsync<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = async (fastify, options) => {
  await fastify.register(FastifySwagger, OpenApiSchemaOptions);
  await fastify.register(Api, { prefix: '/runes/v1' });
  // Ensure the output directory exists (recursive mkdir is a no-op if it already does).
  mkdirSync('./tmp', { recursive: true });
  fastify.addHook('onReady', () => {
    const yamlSpec = fastify.swagger({ yaml: true });
    const jsonSpec = JSON.stringify(fastify.swagger(), null, 2);
    writeFileSync('./tmp/openapi.yaml', yamlSpec);
    writeFileSync('./tmp/openapi.json', jsonSpec);
  });
};
// Boot a throwaway server just long enough for the onReady hook to emit the spec files.
const fastify = Fastify({
  trustProxy: true,
  logger: true,
}).withTypeProvider<TypeBoxTypeProvider>();

void fastify.register(ApiGenerator).then(async () => {
  // `ready()` fires the onReady hook (which writes the spec), then shut down.
  await fastify.ready();
  await fastify.close();
});

View File

@@ -0,0 +1,14 @@
FROM node:20-alpine
WORKDIR /app
COPY ./api/runes /app
# .git is needed by `generate:git-info` to embed commit metadata in the build.
COPY .git /.git
# Keep build-only packages in a virtual group so they can be removed in one step below.
RUN apk add --no-cache --virtual .build-deps git
RUN npm ci --no-audit && \
    npm run build && \
    npm run generate:git-info && \
    npm prune --production
RUN apk del .build-deps
CMD ["node", "./dist/src/index.js"]

View File

@@ -0,0 +1,35 @@
-- Etched runes, keyed by their `block:tx` id.
CREATE TABLE IF NOT EXISTS runes (
    id TEXT NOT NULL PRIMARY KEY,
    number BIGINT NOT NULL UNIQUE,
    name TEXT NOT NULL UNIQUE,
    spaced_name TEXT NOT NULL UNIQUE,
    block_hash TEXT NOT NULL,
    block_height NUMERIC NOT NULL,
    tx_index BIGINT NOT NULL,
    tx_id TEXT NOT NULL,
    divisibility SMALLINT NOT NULL DEFAULT 0,
    premine NUMERIC NOT NULL DEFAULT 0,
    symbol TEXT NOT NULL DEFAULT '¤',
    terms_amount NUMERIC,
    terms_cap NUMERIC,
    terms_height_start NUMERIC,
    terms_height_end NUMERIC,
    terms_offset_start NUMERIC,
    terms_offset_end NUMERIC,
    turbo BOOLEAN NOT NULL DEFAULT FALSE,
    cenotaph BOOLEAN NOT NULL DEFAULT FALSE,
    timestamp BIGINT NOT NULL
);

-- IF NOT EXISTS keeps the index creation idempotent, matching the CREATE TABLE above.
CREATE INDEX IF NOT EXISTS runes_block_height_tx_index_index ON runes (block_height DESC, tx_index DESC);

-- Insert default 'UNCOMMON•GOODS'. ON CONFLICT makes the seed idempotent if this
-- migration is ever re-applied against an existing schema.
-- NOTE(review): symbol is seeded as '' here while the column default is '¤' and the ord
-- protocol documents '⧉' for UNCOMMON•GOODS — confirm the intended value.
INSERT INTO runes (
    id, number, name, spaced_name, block_hash, block_height, tx_index, tx_id, symbol, terms_amount,
    terms_cap, terms_height_start, terms_height_end, timestamp
)
VALUES (
    '1:0', 0, 'UNCOMMONGOODS', 'UNCOMMON•GOODS',
    '0000000000000000000320283a032748cef8227873ff4872689bf23f1cda83a5', 840000, 0, '', '', 1,
    '340282366920938463463374607431768211455', 840000, 1050000, 0
)
ON CONFLICT (id) DO NOTHING;

View File

@@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS supply_changes (
rune_id TEXT NOT NULL,
block_height NUMERIC NOT NULL,
minted NUMERIC NOT NULL DEFAULT 0,
total_mints NUMERIC NOT NULL DEFAULT 0,
burned NUMERIC NOT NULL DEFAULT 0,
total_burns NUMERIC NOT NULL DEFAULT 0,
total_operations NUMERIC NOT NULL DEFAULT 0,
PRIMARY KEY (rune_id, block_height)
);

View File

@@ -0,0 +1,21 @@
CREATE TYPE ledger_operation AS ENUM ('etching', 'mint', 'burn', 'send', 'receive');
CREATE TABLE IF NOT EXISTS ledger (
rune_id TEXT NOT NULL,
block_hash TEXT NOT NULL,
block_height NUMERIC NOT NULL,
tx_index BIGINT NOT NULL,
event_index BIGINT NOT NULL,
tx_id TEXT NOT NULL,
output BIGINT,
address TEXT,
receiver_address TEXT,
amount NUMERIC,
operation ledger_operation NOT NULL,
timestamp BIGINT NOT NULL
);
CREATE INDEX ledger_rune_id_index ON ledger (rune_id);
CREATE INDEX ledger_block_height_tx_index_event_index_index ON ledger (block_height DESC, tx_index DESC, event_index DESC);
CREATE INDEX ledger_address_rune_id_index ON ledger (address, rune_id);
CREATE INDEX ledger_tx_id_output_index ON ledger (tx_id, output);

View File

@@ -0,0 +1,11 @@
CREATE TABLE IF NOT EXISTS balance_changes (
rune_id TEXT NOT NULL,
block_height NUMERIC NOT NULL,
address TEXT NOT NULL,
balance NUMERIC NOT NULL,
total_operations BIGINT NOT NULL DEFAULT 0,
PRIMARY KEY (rune_id, block_height, address)
);
CREATE INDEX balance_changes_address_balance_index ON balance_changes (address, block_height, balance DESC);
CREATE INDEX balance_changes_rune_id_balance_index ON balance_changes (rune_id, block_height, balance DESC);