mirror of
https://github.com/alexgo-io/bitcoin-indexer.git
synced 2026-01-12 16:52:57 +08:00
feat: migrate ordinals api to ordhook repo (#389)
* feat: add database migration cli commands
* update cargo lock
* chore: import first api files
* test: cache first
* test: cache
* test: sats
* test: inscription show
* test: inscription transfers
* test: inscriptions index
* test: inscriptions complete
* test: counts
* test: status
* test: block counts
* test: brc20 activity
* test: brc20 holders
* ci: api tests
* ci: update nvmrc path
* ci: remove migration refs
* ci: unused exports
* ci: unused
* ci: build publish
* ci: monorepo
* fix: timestamps
* Update api/ordinals/docs/feature-guides/rate-limiting.md
* Update api/ordinals/docs/feature-guides/rate-limiting.md
* Update api/ordinals/docs/overview.md
* Update api/ordinals/src/api/schemas.ts
* Update api/ordinals/src/api/schemas.ts
* fix: warnings

---------

Co-authored-by: ASuciuX <151519329+ASuciuX@users.noreply.github.com>
190 .github/workflows/ci.yaml vendored
@@ -15,19 +15,117 @@ concurrency:
  group: ${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}
  cancel-in-progress: true

env:
  DOCKER_IMAGE: hirosystems/${{ github.event.repository.name }}

jobs:
  api-lint:
    strategy:
      fail-fast: false
      matrix:
        suite: [ordinals]
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./api/${{ matrix.suite }}
    steps:
      - uses: actions/checkout@v4

      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: 'api/${{ matrix.suite }}/.nvmrc'

      - name: Cache node modules
        uses: actions/cache@v4
        env:
          cache-name: cache-node-modules
        with:
          path: |
            ~/.npm
            **/node_modules
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-

      - name: Install deps
        run: npm ci --audit=false

      - name: Lint ESLint
        run: npm run lint:eslint

      - name: Lint Prettier
        run: npm run lint:prettier

      - name: Lint Unused Exports
        run: npm run lint:unused-exports

  api-test:
    strategy:
      fail-fast: false
      matrix:
        suite: [ordinals]
    defaults:
      run:
        working-directory: ./api/${{ matrix.suite }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: 'api/${{ matrix.suite }}/.nvmrc'

      - name: Cache node modules
        uses: actions/cache@v4
        env:
          cache-name: cache-node-modules
        with:
          path: |
            ~/.npm
            **/node_modules
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-build-${{ env.cache-name }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-

      - name: Install deps
        run: npm ci --audit=false

      - name: Setup integration environment
        run: |
          sudo ufw disable
          npm run testenv:run -- -d
          npm run testenv:logs -- --no-color &> docker-compose-logs.txt &

      - name: Run tests
        run: npm run test -- --coverage

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: Print integration environment logs
        run: cat docker-compose-logs.txt
        if: failure()

      - name: Teardown integration environment
        run: npm run testenv:stop
        if: always()

  test:
    strategy:
      fail-fast: false
      matrix:
        suite: [cli, core]
        suite: [ordhook-cli, ordhook-core]
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./components/ordhook-${{ matrix.suite }}
        working-directory: ./components/${{ matrix.suite }}
    steps:
      - uses: actions/checkout@v4
        with:
@@ -73,11 +171,11 @@ jobs:
        run: docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml down -v -t 0
        if: always()

  build-publish:
  semantic-release:
    runs-on: ubuntu-latest
    needs: test
    needs: [api-lint, api-test, test]
    outputs:
      docker_image_digest: ${{ steps.docker_push.outputs.digest }}
      new_release_version: ${{ steps.semantic.outputs.new_release_version }}
      new_release_published: ${{ steps.semantic.outputs.new_release_published }}
    steps:
      - uses: actions/checkout@v4
@@ -92,19 +190,31 @@ jobs:
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SEMANTIC_RELEASE_PACKAGE: ${{ github.event.repository.name }}
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_CRATES_IO_API_KEY }}
        with:
          semantic_version: 19
          extra_plugins: |
            @semantic-release/changelog@6.0.3
            @semantic-release/git@10.0.1
            @semantic-release/exec@6.0.3
            conventional-changelog-conventionalcommits@6.1.0

  build-publish:
    runs-on: ubuntu-latest
    needs: semantic-release
    outputs:
      docker_image_digest: ${{ steps.docker_push.outputs.digest }}
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Checkout tag
        if: steps.semantic.outputs.new_release_version != ''
        if: needs.semantic-release.outputs.new_release_version != ''
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          ref: v${{ steps.semantic.outputs.new_release_version }}
          ref: v${{ needs.semantic-release.outputs.new_release_version }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
@@ -114,12 +224,12 @@ jobs:
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.DOCKER_IMAGE }}
            hirosystems/ordhook
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}},value=${{ steps.semantic.outputs.new_release_version }},enable=${{ steps.semantic.outputs.new_release_version != '' }}
            type=semver,pattern={{major}}.{{minor}},value=${{ steps.semantic.outputs.new_release_version }},enable=${{ steps.semantic.outputs.new_release_version != '' }}
            type=semver,pattern={{version}},value=${{ needs.semantic-release.outputs.new_release_version }},enable=${{ needs.semantic-release.outputs.new_release_version != '' }}
            type=semver,pattern={{major}}.{{minor}},value=${{ needs.semantic-release.outputs.new_release_version }},enable=${{ needs.semantic-release.outputs.new_release_version != '' }}
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Log in to DockerHub
@@ -141,7 +251,59 @@ jobs:
          cache-from: type=gha
          cache-to: type=gha,mode=max
          # Only push if (there's a new release on main branch, or if building a non-main branch) and (Only run on non-PR events or only PRs that aren't from forks)
          push: ${{ (github.ref != 'refs/heads/main' || steps.semantic.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}
          push: ${{ (github.ref != 'refs/heads/main' || needs.semantic-release.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}

  api-build-publish:
    strategy:
      fail-fast: false
      matrix:
        suite: [ordinals]
    runs-on: ubuntu-latest
    needs: semantic-release
    steps:
      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }}
          fetch-depth: 0
          persist-credentials: false

      - name: Checkout tag
        if: needs.semantic-release.outputs.new_release_version != ''
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          ref: v${{ needs.semantic-release.outputs.new_release_version }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Docker Meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            hirosystems/${{ matrix.suite }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}},value=${{ needs.semantic-release.outputs.new_release_version }},enable=${{ needs.semantic-release.outputs.new_release_version != '' }}
            type=semver,pattern={{major}}.{{minor}},value=${{ needs.semantic-release.outputs.new_release_version }},enable=${{ needs.semantic-release.outputs.new_release_version != '' }}

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      - name: Build/Tag/Push Image
        uses: docker/build-push-action@v5
        with:
          context: ./api/${{ matrix.suite }}
          file: ./api/${{ matrix.suite }}/Dockerfile
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          # Only push if (there's a new release on main branch, or if building a non-main branch) and (Only run on non-PR events or only PRs that aren't from forks)
          push: ${{ (github.ref != 'refs/heads/master' || needs.semantic-release.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}

  deploy-dev:
    runs-on: ubuntu-latest
67 .vscode/launch.json vendored
@@ -60,6 +60,71 @@
        },
        "args": [],
        "cwd": "${workspaceFolder}"
      }
    },
    {
      "type": "node",
      "request": "launch",
      "name": "test: ordinals-api",
      "program": "${workspaceFolder}/api/ordinals/node_modules/jest/bin/jest",
      "cwd": "${workspaceFolder}/api/ordinals/",
      "args": [
        "--testTimeout=3600000",
        "--runInBand",
        "--no-cache"
      ],
      "outputCapture": "std",
      "console": "integratedTerminal",
      "preLaunchTask": "npm: testenv:run",
      "postDebugTask": "npm: testenv:stop",
      "env": {
        "PGHOST": "localhost",
        "PGUSER": "postgres",
        "PGPASSWORD": "postgres",
      },
    },
    {
      "type": "node",
      "request": "launch",
      "name": "test: ordinals-api (api)",
      "program": "${workspaceFolder}/api/ordinals/node_modules/jest/bin/jest",
      "cwd": "${workspaceFolder}/api/ordinals/",
      "args": [
        "--testTimeout=3600000",
        "--runInBand",
        "--no-cache",
        "${workspaceFolder}/api/ordinals/tests/api/"
      ],
      "outputCapture": "std",
      "console": "integratedTerminal",
      "preLaunchTask": "npm: testenv:run",
      "postDebugTask": "npm: testenv:stop",
      "env": {
        "PGHOST": "localhost",
        "PGUSER": "postgres",
        "PGPASSWORD": "postgres",
      },
    },
    {
      "type": "node",
      "request": "launch",
      "name": "test: ordinals-api (brc-20)",
      "program": "${workspaceFolder}/api/ordinals/node_modules/jest/bin/jest",
      "cwd": "${workspaceFolder}/api/ordinals/",
      "args": [
        "--testTimeout=3600000",
        "--runInBand",
        "--no-cache",
        "${workspaceFolder}/api/ordinals/tests/brc-20/"
      ],
      "outputCapture": "std",
      "console": "integratedTerminal",
      "preLaunchTask": "npm: testenv:run",
      "postDebugTask": "npm: testenv:stop",
      "env": {
        "PGHOST": "localhost",
        "PGUSER": "postgres",
        "PGPASSWORD": "postgres",
      },
    },
  ]
}
43 .vscode/tasks.json vendored Normal file
@@ -0,0 +1,43 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "npm: testenv:run",
      "type": "shell",
      "command": "npm run testenv:run -- -d",
      "isBackground": true,
      "options": {
        "cwd": "${workspaceFolder}/api/ordinals/",
      },
      "problemMatcher": {
        "pattern": {
          "regexp": ".",
          "file": 1,
          "location": 2,
          "message": 3
        },
        "background": {
          "activeOnStart": true,
          "beginsPattern": ".",
          "endsPattern": "."
        }
      }
    },
    {
      "label": "npm: testenv:stop",
      "type": "shell",
      "command": "npm run testenv:stop",
      "options": {
        "cwd": "${workspaceFolder}/api/ordinals/",
      },
      "presentation": {
        "echo": true,
        "reveal": "silent",
        "focus": false,
        "panel": "shared",
        "showReuseMessage": true,
        "clear": false
      }
    }
  ]
}
3 api/ordinals/.commitlintrc.json Normal file
@@ -0,0 +1,3 @@
{
  "extends": ["@commitlint/config-conventional"]
}
2 api/ordinals/.dockerignore Normal file
@@ -0,0 +1,2 @@
**/node_modules
**/tmp
9 api/ordinals/.editorconfig Normal file
@@ -0,0 +1,9 @@
# top-most EditorConfig file
root = true

[{*.ts,*.json}]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
1 api/ordinals/.env.example Normal file
@@ -0,0 +1 @@
# See src/env.ts for environment variable documentation.
2 api/ordinals/.eslintignore Normal file
@@ -0,0 +1,2 @@
node_modules/
.eslintrc.js
29 api/ordinals/.eslintrc.js Normal file
@@ -0,0 +1,29 @@
module.exports = {
  root: true,
  extends: ['@stacks/eslint-config', 'prettier'],
  overrides: [],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    tsconfigRootDir: __dirname,
    project: './tsconfig.json',
    ecmaVersion: 2020,
    sourceType: 'module',
  },
  ignorePatterns: ['*.config.js', 'config/*', '*.mjs', 'tests/*.js', 'client/*'],
  plugins: ['@typescript-eslint', 'eslint-plugin-tsdoc', 'prettier'],
  rules: {
    'prettier/prettier': 'error',
    '@typescript-eslint/no-inferrable-types': 'off',
    '@typescript-eslint/camelcase': 'off',
    '@typescript-eslint/no-empty-function': 'off',
    '@typescript-eslint/no-use-before-define': ['error', 'nofunc'],
    '@typescript-eslint/no-floating-promises': ['error', { ignoreVoid: true }],
    'no-warning-comments': 'warn',
    'tsdoc/syntax': 'error',
    // TODO: Remove this when `any` abi type is fixed.
    '@typescript-eslint/no-unsafe-assignment': 'off',
    '@typescript-eslint/no-unsafe-member-access': 'off',
    '@typescript-eslint/no-unsafe-call': 'off',
    '@typescript-eslint/restrict-template-expressions': 'off',
  },
};
43 api/ordinals/.gitignore vendored Normal file
@@ -0,0 +1,43 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
*.swp

pids
logs
results
tmp

# Build
public/css/main.css

# Coverage reports
coverage

# API keys and secrets
.env

# Dependency directory
node_modules
bower_components

# Editors
.idea
*.iml

# OS metadata
.DS_Store
Thumbs.db

# Ignore built ts files
dist/**/*

# ignore yarn.lock
yarn.lock
.vercel
.git-info
1 api/ordinals/.nvmrc Normal file
@@ -0,0 +1 @@
18
34 api/ordinals/.releaserc Normal file
@@ -0,0 +1,34 @@
{
  "extends": "semantic-release-monorepo",
  "branches": [
    "+([0-9])?(.{+([0-9]),x}).x",
    "master",
    "next",
    "next-major",
    {
      "name": "beta",
      "prerelease": true
    },
    {
      "name": "alpha",
      "prerelease": true
    }
  ],
  "plugins": [
    [
      "@semantic-release/commit-analyzer",
      {
        "preset": "conventionalcommits"
      }
    ],
    [
      "@semantic-release/release-notes-generator",
      {
        "preset": "conventionalcommits"
      }
    ],
    "@semantic-release/github",
    "@semantic-release/changelog",
    "@semantic-release/git"
  ]
}
1456 api/ordinals/CHANGELOG.md Normal file
File diff suppressed because it is too large
13 api/ordinals/Dockerfile Normal file
@@ -0,0 +1,13 @@
FROM node:18-alpine

WORKDIR /app
COPY . .

RUN apk add --no-cache --virtual .build-deps git
RUN npm ci && \
    npm run build && \
    npm run generate:git-info && \
    npm prune --production
RUN apk del .build-deps

CMD ["node", "./dist/src/index.js"]
202 api/ordinals/LICENSE Normal file
@@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2023 Hiro Systems PBC

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
138 api/ordinals/README.md Normal file
@@ -0,0 +1,138 @@

▶ **Ordinals API**: A service that ingests Bitcoin ordinal inscriptions to expose them via REST API endpoints.

* [Features](#features)
* [API Reference](#api-reference)
* [Quick Start](#quick-start)
  * [System Requirements](#system-requirements)
  * [Running the API](#running-the-api)
    * [Run Modes](#run-modes)
    * [Stopping the API](#stopping-the-api)
* [Bugs and Feature Requests](#bugs-and-feature-requests)
* [Contribute](#contribute)
* [Community](#community)

***

# Features

* Inscription endpoints
  * Genesis block and transaction information
  * Transfer history
  * Transfers per block
  * Current location and ownership information
  * Blessed and cursed inscriptions
* BRC-20 endpoints
  * Full token deploy, mint and transfer history
  * Activities per token and per address
  * Address balances
* Satoshi ordinal notation endpoints
* ETag cache support
* Run modes for auto-scaling

# API Reference

See the [Ordinals API Reference](https://docs.hiro.so/bitcoin/ordinals/api) for more information.

# Quick Start

## System Requirements

The Ordinals API has hard dependencies on other systems. Before you start, you'll need to have access to:

1. An [Ordhook node](https://github.com/hirosystems/ordhook) with a fully indexed Ordinals database.
1. A local writeable Postgres database for data storage.
   * We recommend a 1TB volume size here.

## Running the API

1. Clone the repo.

1. Create an `.env` file and specify the appropriate values to configure the local API server, Postgres DB and Ordhook node reachability. See [`env.ts`](https://github.com/hirosystems/ordinals-api/blob/develop/src/env.ts) for all available configuration options.

1. Build the app (NodeJS v18+ is required)

   ```
   npm install
   npm run build
   ```

1. Start the service

   ```
   npm run start
   ```

### Run Modes

To better support auto-scaling server configurations, this service supports three run modes specified by the `RUN_MODE` environment variable:

* `default`: Runs all background jobs and the API server. Use this when you're running this service only on one instance. This is the default mode.
* `readonly`: Runs only the API server. Use this in an auto-scaled cluster when you have multiple `readonly` instances and just one `writeonly` instance. This mode needs a `writeonly` instance to continue populating the DB.
* `writeonly`: Use one of these in an auto-scaled environment so you can continue consuming new inscriptions. Use in conjunction with multiple `readonly` instances as explained above.
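A minimal TypeScript sketch of how an entry point can branch on `RUN_MODE` follows; it illustrates the contract above, not the service's actual bootstrap code, and the two start functions are hypothetical placeholders:

```typescript
// Hypothetical sketch of the RUN_MODE contract described above. The two
// start functions are placeholders, not the service's real module names.
type RunMode = 'default' | 'readonly' | 'writeonly';

async function startBackgroundJobs(): Promise<void> {
  // writeonly/default path: consume new inscriptions and write them to Postgres
}

async function startApiServer(): Promise<void> {
  // readonly/default path: serve the REST endpoints
}

async function main(): Promise<void> {
  const mode = (process.env.RUN_MODE ?? 'default') as RunMode;
  if (mode !== 'readonly') await startBackgroundJobs();  // default + writeonly
  if (mode !== 'writeonly') await startApiServer();      // default + readonly
}

void main();
```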
### Stopping the API

When shutting down, you should always prefer to send the `SIGINT` signal instead of `SIGKILL` so the service has time to finish any pending background work and all dependencies are gracefully disconnected.
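A minimal sketch of the corresponding `SIGINT` handling (illustrative only; `closeServer` and `closeDb` are hypothetical helpers, not the service's real functions):

```typescript
// Illustrative graceful shutdown; both close functions are placeholders.
async function closeServer(): Promise<void> {
  // stop accepting new requests and finish in-flight ones
}

async function closeDb(): Promise<void> {
  // let pending background writes finish, then disconnect from Postgres
}

process.on('SIGINT', () => {
  void (async () => {
    await closeServer();
    await closeDb();
    process.exit(0);
  })();
});
```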
# Bugs and feature requests

If you encounter a bug or have a feature request, we encourage you to follow the steps below:

1. **Search for existing issues:** Before submitting a new issue, please search [existing and closed issues](../../issues) to check if a similar problem or feature request has already been reported.
1. **Open a new issue:** If it hasn't been addressed, please [open a new issue](../../issues/new/choose). Choose the appropriate issue template and provide as much detail as possible, including steps to reproduce the bug or a clear description of the requested feature.
1. **Evaluation SLA:** Our team reads and evaluates all the issues and pull requests. We are available Monday to Friday and we make a best effort to respond within 7 business days.

Please **do not** use the issue tracker for personal support requests or to ask for the status of a transaction. You'll find help at the [#support Discord channel](https://discord.gg/SK3DxdsP).

# Contribute

Development of this product happens in the open on GitHub, and we are grateful to the community for contributing bugfixes and improvements. Read below to learn how you can take part in improving the product.

## Code of Conduct
Please read our [Code of conduct](../../../.github/blob/main/CODE_OF_CONDUCT.md) since we expect project participants to adhere to it.

## Contributing Guide
Read our [contributing guide](.github/CONTRIBUTING.md) to learn about our development process, how to propose bugfixes and improvements, and how to build and test your changes.

# Community

Join our community and stay connected with the latest updates and discussions:

- [Join our Discord community chat](https://discord.gg/ZQR6cyZC) to engage with other users, ask questions, and participate in discussions.

- [Visit hiro.so](https://www.hiro.so/) for updates and subscribing to the mailing list.

- Follow [Hiro on Twitter.](https://twitter.com/hirosystems)
6 api/ordinals/client/typescript.json Normal file
@@ -0,0 +1,6 @@
{
  "supportsES6": false,
  "npmName": "@hirosystems/ordinals-api-client",
  "npmVersion": "1.0.0",
  "modelPropertyNaming": "original"
}
4 api/ordinals/client/typescript/.gitignore vendored Normal file
@@ -0,0 +1,4 @@
wwwroot/*.js
node_modules
typings
dist
4 api/ordinals/client/typescript/.swagger-codegen-ignore Normal file
@@ -0,0 +1,4 @@
.gitignore
package.json
package-lock.json
README.md
1 api/ordinals/client/typescript/.swagger-codegen/VERSION Normal file
@@ -0,0 +1 @@
3.0.42
19 api/ordinals/client/typescript/README.md Normal file
@@ -0,0 +1,19 @@
## @hirosystems/ordinals-api-client

This is a client library for the [Ordinals API](https://github.com/hirosystems/ordinals-api).

### Installation

```
npm install @hirosystems/ordinals-api-client
```

### Example

```typescript
import { Configuration, InscriptionsApi } from "@hirosystems/ordinals-api-client";

const config = new Configuration();
const api = new InscriptionsApi(config);
const result = await api.getInscription("200000")
```
1171 api/ordinals/client/typescript/api.ts Normal file
File diff suppressed because it is too large
86 api/ordinals/client/typescript/api_test.spec.ts Normal file
@@ -0,0 +1,86 @@
/**
 * Ordinals API
 * A service that indexes Bitcoin Ordinals data and exposes it via REST API endpoints.
 *
 * OpenAPI spec version: v0.0.1
 *
 *
 * NOTE: This file is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the file manually.
 */

import * as api from "./api"
import { Configuration } from "./configuration"

const config: Configuration = {}

describe("InscriptionsApi", () => {
  let instance: api.InscriptionsApi
  beforeEach(function() {
    instance = new api.InscriptionsApi(config)
  });

  test("getInscription", () => {
    const id: string = "id_example"
    return expect(instance.getInscription(id, {})).resolves.toBe(null)
  })
  test("getInscriptionContent", () => {
    const id: string = "id_example"
    return expect(instance.getInscriptionContent(id, {})).resolves.toBe(null)
  })
  test("getInscriptionTransfers", () => {
    const id: string = "id_example"
    const offset: number = 56
    const limit: number = 56
    return expect(instance.getInscriptionTransfers(id, offset, limit, {})).resolves.toBe(null)
  })
  test("getInscriptions", () => {
    const genesis_block: string = "genesis_block_example"
    const from_genesis_block_height: string = "from_genesis_block_height_example"
    const to_genesis_block_height: string = "to_genesis_block_height_example"
    const from_genesis_timestamp: number = 56
    const to_genesis_timestamp: number = 56
    const from_sat_ordinal: number = 56
    const to_sat_ordinal: number = 56
    const from_sat_coinbase_height: string = "from_sat_coinbase_height_example"
    const to_sat_coinbase_height: string = "to_sat_coinbase_height_example"
    const from_number: number = 56
    const to_number: number = 56
    const id: Array<string> = ["id_example"]
    const number: Array<number> = [56]
    const output: string = "output_example"
    const address: Array<string> = ["address_example"]
    const mime_type: Array<string> = ["mime_type_example"]
    const rarity: Array<string> = ["rarity_example"]
    const offset: number = 56
    const limit: number = 56
    const order_by: string = "order_by_example"
    const order: string = "order_example"
    return expect(instance.getInscriptions(genesis_block, from_genesis_block_height, to_genesis_block_height, from_genesis_timestamp, to_genesis_timestamp, from_sat_ordinal, to_sat_ordinal, from_sat_coinbase_height, to_sat_coinbase_height, from_number, to_number, id, number, output, address, mime_type, rarity, offset, limit, order_by, order, {})).resolves.toBe(null)
  })
})

describe("SatoshisApi", () => {
  let instance: api.SatoshisApi
  beforeEach(function() {
    instance = new api.SatoshisApi(config)
  });

  test("getSatoshi", () => {
    const ordinal: number = 56
    return expect(instance.getSatoshi(ordinal, {})).resolves.toBe(null)
  })
})

describe("StatusApi", () => {
  let instance: api.StatusApi
  beforeEach(function() {
    instance = new api.StatusApi(config)
  });

  test("getApiStatus", () => {
    return expect(instance.getApiStatus({})).resolves.toBe(null)
  })
})
65 api/ordinals/client/typescript/configuration.ts Normal file
@@ -0,0 +1,65 @@
// tslint:disable
/**
 * Ordinals API
 * A service that indexes Bitcoin Ordinals data and exposes it via REST API endpoints.
 *
 * OpenAPI spec version: v0.0.1
 *
 *
 * NOTE: This file is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the file manually.
 */

export interface ConfigurationParameters {
  apiKey?: string | ((name: string) => string);
  username?: string;
  password?: string;
  accessToken?: string | ((name: string, scopes?: string[]) => string);
  basePath?: string;
}

export class Configuration {
  /**
   * parameter for apiKey security
   * @param name security name
   * @memberof Configuration
   */
  apiKey?: string | ((name: string) => string);
  /**
   * parameter for basic security
   *
   * @type {string}
   * @memberof Configuration
   */
  username?: string;
  /**
   * parameter for basic security
   *
   * @type {string}
   * @memberof Configuration
   */
  password?: string;
  /**
   * parameter for oauth2 security
   * @param name security name
   * @param scopes oauth2 scope
   * @memberof Configuration
   */
  accessToken?: string | ((name: string, scopes?: string[]) => string);
  /**
   * override base path
   *
   * @type {string}
   * @memberof Configuration
   */
  basePath?: string;

  constructor(param: ConfigurationParameters = {}) {
    this.apiKey = param.apiKey;
    this.username = param.username;
    this.password = param.password;
    this.accessToken = param.accessToken;
    this.basePath = param.basePath;
  }
}
2 api/ordinals/client/typescript/custom.d.ts vendored Normal file
@@ -0,0 +1,2 @@
declare module 'isomorphic-fetch';
declare module 'url';
51 api/ordinals/client/typescript/git_push.sh Normal file
@@ -0,0 +1,51 @@
#!/bin/sh
# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
#
# Usage example: /bin/sh ./git_push.sh wing328 swagger-petstore-perl "minor update"

git_user_id=$1
git_repo_id=$2
release_note=$3

if [ "$git_user_id" = "" ]; then
    git_user_id="GIT_USER_ID"
    echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
fi

if [ "$git_repo_id" = "" ]; then
    git_repo_id="GIT_REPO_ID"
    echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
fi

if [ "$release_note" = "" ]; then
    release_note="Minor update"
    echo "[INFO] No command line input provided. Set \$release_note to $release_note"
fi

# Initialize the local directory as a Git repository
git init

# Adds the files in the local repository and stages them for commit.
git add .

# Commits the tracked changes and prepares them to be pushed to a remote repository.
git commit -m "$release_note"

# Sets the new remote
git_remote=`git remote`
if [ "$git_remote" = "" ]; then # git remote not defined

    if [ "$GIT_TOKEN" = "" ]; then
        echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
        git remote add origin https://github.com/${git_user_id}/${git_repo_id}.git
    else
        git remote add origin https://${git_user_id}:${GIT_TOKEN}@github.com/${git_user_id}/${git_repo_id}.git
    fi

fi

git pull origin master

# Pushes (Forces) the changes in the local repository up to the remote repository
echo "Git pushing to https://github.com/${git_user_id}/${git_repo_id}.git"
git push origin master 2>&1 | grep -v 'To https'
15 api/ordinals/client/typescript/index.ts Normal file
@@ -0,0 +1,15 @@
// tslint:disable
/**
 * Ordinals API
 * A service that indexes Bitcoin Ordinals data and exposes it via REST API endpoints.
 *
 * OpenAPI spec version: v0.0.1
 *
 *
 * NOTE: This file is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the file manually.
 */

export * from "./api";
export * from "./configuration";
6428 api/ordinals/client/typescript/package-lock.json generated Normal file
File diff suppressed because it is too large
53 api/ordinals/client/typescript/package.json Normal file
@@ -0,0 +1,53 @@
{
  "name": "@hirosystems/ordinals-api-client",
  "version": "1.0.0",
  "description": "Client for @hirosystems/ordinals-api",
  "author": "Hiro Systems PBC <engineering@hiro.so> (https://hiro.so)",
  "keywords": [
    "fetch",
    "typescript",
    "swagger-client",
    "@hirosystems/ordinals-api-client"
  ],
  "license": "GPL-3.0",
  "main": "./dist/index.js",
  "typings": "./dist/index.d.ts",
  "files": [
    "dist/",
    "api.ts",
    "configuration.ts",
    "custom.d.ts",
    "index.ts"
  ],
  "scripts": {
    "build": "tsc --outDir dist/",
    "test": "jest",
    "prepublishOnly": "npm run build"
  },
  "dependencies": {
    "isomorphic-fetch": "^3.0.0"
  },
  "devDependencies": {
    "@types/jest": "^25.2.1",
    "@types/node": "^13.13.0",
    "@types/babel__core": "7.1.18",
    "@types/babel__traverse": "7.14.2",
    "jest": "^25.4.0",
    "ts-jest": "^25.4.0",
    "typescript": "^3.8.3"
  },
  "jest": {
    "transform": {
      "^.+\\.tsx?$": "ts-jest"
    },
    "testRegex": "(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$",
    "moduleFileExtensions": [
      "ts",
      "tsx",
      "js",
      "jsx",
      "json",
      "node"
    ]
  }
}
22 api/ordinals/client/typescript/tsconfig.json Normal file
@@ -0,0 +1,22 @@
{
  "compilerOptions": {
    "declaration": true,
    "target": "es5",
    "module": "commonjs",
    "noImplicitAny": true,
    "outDir": "dist",
    "rootDir": ".",
    "typeRoots": [
      "./node_modules/@types"
    ],
    "lib": [
      "es6",
      "dom"
    ]
  },
  "exclude": [
    "dist",
    "node_modules",
    "**/*.spec.ts"
  ]
}
15 api/ordinals/docs/feature-guides/rate-limiting.md Normal file
@@ -0,0 +1,15 @@
---
Title: Rate Limiting for Ordinals API
---

# Rate Limiting for Ordinals API

The rate limit per minute (RPM) is applied to all the API endpoints based on the requested token addresses.

| **Endpoint**                 | **API Key Used** | **Rate per minute (RPM) limit** |
|------------------------------|------------------|---------------------------------|
| api.mainnet.hiro.so/ordinals | No               | 50                              |
| api.mainnet.hiro.so/ordinals | Yes              | 500                             |

If you're interested in obtaining an API key from Hiro, you can generate a free key in the [Hiro Platform](https://platform.hiro.so/).
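As a sketch, a client might attach its key like this. Note that both the `x-api-key` header name and the `/ordinals/v1/` status path are assumptions here, so confirm them against Hiro's documentation:

```typescript
// Hypothetical request against the rate-limited Ordinals API.
// ASSUMPTIONS: header name `x-api-key` and the `/ordinals/v1/` status path.
const API_KEY = process.env.HIRO_API_KEY; // key generated in the Hiro Platform

async function getApiStatus(): Promise<unknown> {
  const res = await fetch('https://api.mainnet.hiro.so/ordinals/v1/', {
    headers: API_KEY ? { 'x-api-key': API_KEY } : {},
  });
  if (res.status === 429) throw new Error('rate limit exceeded');
  return res.json();
}
```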
36 api/ordinals/docs/overview.md Normal file
@@ -0,0 +1,36 @@
---
Title: Overview
---

# Ordinals API Overview

The Ordinals API provides a service that indexes Bitcoin Ordinals data and offers a REST API to access and query this data.

> **_NOTE:_**
>
> To explore the detailed documentation for the API endpoints, request and response formats, you can refer to the [OpenAPI specification](https://docs.hiro.so/ordinals).
>
> The source code for this project is available in our [GitHub repository](https://github.com/hirosystems/ordinals-api).
> You can explore the codebase, [contribute](https://docs.hiro.so/contributors-guide), and raise [issues](https://github.com/hirosystems/ordinals-api/issues) or [pull requests](https://github.com/hirosystems/ordinals-api/pulls).

Here are the key features of the Ordinals API:

**Ordinal Inscription Ingestion**:
The API helps with the complete ingestion of ordinal inscriptions.
Using our endpoints, you can retrieve the metadata for a particular inscription, all inscriptions held by a particular address, trading activity for inscriptions, and more.
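For example, here is a minimal sketch of fetching one inscription's metadata with `fetch`; the exact path is an assumption and should be checked against the OpenAPI specification linked above:

```typescript
// Sketch: retrieve metadata for a single inscription by ID or number.
// The v1 path is an assumption here; verify it in the OpenAPI spec.
async function getInscription(id: string): Promise<unknown> {
  const res = await fetch(
    `https://api.mainnet.hiro.so/ordinals/v1/inscriptions/${id}`
  );
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  return res.json();
}
```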
**BRC-20 Support**:
The API offers support for BRC-20 tokens, a fungible token standard built on top of ordinal theory.
Retrieve data for a particular BRC-20 token, a user's BRC-20 holdings, marketplace activity, and more.

**REST JSON Endpoints with ETag Caching**:
The API provides easy-to-use REST endpoints that return responses in JSON format.
It also supports *ETag caching*, which allows you to cache responses based on inscriptions.
This helps optimize performance and reduce unnecessary requests.
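A short sketch of how a client takes advantage of this with a conditional request (plain HTTP `ETag`/`If-None-Match` semantics, nothing API-specific):

```typescript
// Sketch: conditional GET that reuses the ETag from a previous response.
async function fetchIfChanged(url: string, etag?: string): Promise<Response> {
  const res = await fetch(url, {
    headers: etag ? { 'If-None-Match': etag } : {},
  });
  if (res.status === 304) {
    // Not modified: the cached body is still valid; nothing was re-downloaded.
  }
  return res;
}
```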
**Auto-Scale Server Configurations**:
The Ordinals API supports three run modes based on the `RUN_MODE` environment variable:

- `default`: This mode runs all background jobs and the API server. It is suitable for running a single instance of the API.
- `readonly`: Only the API server runs in this mode. It is designed for auto-scaled clusters with multiple `readonly` instances and a single `writeonly` instance. The `writeonly` instance is responsible for populating the database.
- `writeonly`: This mode is used in an auto-scaled environment to consume new inscriptions and push that data to a database. It works in conjunction with multiple `readonly` instances.
201 api/ordinals/jest.config.js Normal file
@@ -0,0 +1,201 @@
/*
 * For a detailed explanation regarding each configuration property, visit:
 * https://jestjs.io/docs/configuration
 */

module.exports = {
  // All imported modules in your tests should be mocked automatically
  // automock: false,

  // Stop running tests after `n` failures
  // bail: 0,

  // The directory where Jest should store its cached dependency information
  // cacheDirectory: "/private/var/folders/v3/swygw5ld38x59y9wtc2qv3fc0000gn/T/jest_dx",

  // Automatically clear mock calls, instances, contexts and results before every test
  // clearMocks: false,

  // Indicates whether the coverage information should be collected while executing the test
  // collectCoverage: false,

  // An array of glob patterns indicating a set of files for which coverage information should be collected
  collectCoverageFrom: [
    "src/**/*.ts",
    "migrations/*.ts",
  ],

  // The directory where Jest should output its coverage files
  // coverageDirectory: undefined,

  // An array of regexp pattern strings used to skip coverage collection
  coveragePathIgnorePatterns: [
    "/node_modules/",
    "/src/@types/"
  ],

  // Indicates which provider should be used to instrument code for coverage
  coverageProvider: "v8",

  // A list of reporter names that Jest uses when writing coverage reports
  // coverageReporters: [
  //   "json",
  //   "text",
  //   "lcov",
  //   "clover"
  // ],

  // An object that configures minimum threshold enforcement for coverage results
  // coverageThreshold: undefined,

  // A path to a custom dependency extractor
  // dependencyExtractor: undefined,

  // Make calling deprecated APIs throw helpful error messages
  // errorOnDeprecated: false,

  // The default configuration for fake timers
  // fakeTimers: {
  //   "enableGlobally": false
  // },

  // Force coverage collection from ignored files using an array of glob patterns
  // forceCoverageMatch: [],

  // A path to a module which exports an async function that is triggered once before all test suites
  globalSetup: './tests/setup.ts',

  // A path to a module which exports an async function that is triggered once after all test suites
  // globalTeardown: undefined,

  // A set of global variables that need to be available in all test environments
  // globals: {},

  // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
  // maxWorkers: "50%",

  // An array of directory names to be searched recursively up from the requiring module's location
  // moduleDirectories: [
  //   "node_modules"
  // ],

  // An array of file extensions your modules use
  // moduleFileExtensions: [
  //   "js",
  //   "mjs",
  //   "cjs",
  //   "jsx",
  //   "ts",
  //   "tsx",
  //   "json",
  //   "node"
  // ],

  // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
  // moduleNameMapper: {},

  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
  // modulePathIgnorePatterns: [],

  // Activates notifications for test results
  // notify: false,

  // An enum that specifies notification mode. Requires { notify: true }
  // notifyMode: "failure-change",

  // A preset that is used as a base for Jest's configuration
  preset: 'ts-jest',

  // Run tests from one or more projects
  // projects: undefined,

  // Use this configuration option to add custom reporters to Jest
  // reporters: undefined,

  // Automatically reset mock state before every test
  // resetMocks: false,

  // Reset the module registry before running each individual test
  // resetModules: false,

  // A path to a custom resolver
  // resolver: undefined,

  // Automatically restore mock state and implementation before every test
  // restoreMocks: false,

  // The root directory that Jest should scan for tests and modules within
  rootDir: '',

  // A list of paths to directories that Jest should use to search for files in
  // roots: [
  //   "<rootDir>"
  // ],

  // Allows you to use a custom runner instead of Jest's default test runner
  // runner: "jest-runner",

  // The paths to modules that run some code to configure or set up the testing environment before each test
  // setupFiles: [],

  // A list of paths to modules that run some code to configure or set up the testing framework before each test
  // setupFilesAfterEnv: [],

  // The number of seconds after which a test is considered as slow and reported as such in the results.
  // slowTestThreshold: 5,

  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
  // snapshotSerializers: [],

  // The test environment that will be used for testing
  // testEnvironment: "jest-environment-node",

  // Options that will be passed to the testEnvironment
  // testEnvironmentOptions: {},

  // Adds a location field to test results
  // testLocationInResults: false,

  // The glob patterns Jest uses to detect test files
  // testMatch: [
  //   "**/__tests__/**/*.[jt]s?(x)",
  //   "**/?(*.)+(spec|test).[tj]s?(x)"
  // ],

  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
  testPathIgnorePatterns: [
    "/node_modules/",
    "/client/",
    "/dist/"
  ],

  // The regexp pattern or array of patterns that Jest uses to detect test files
  // testRegex: [],

  // This option allows the use of a custom results processor
  // testResultsProcessor: undefined,

  // This option allows use of a custom test runner
  // testRunner: "jest-circus/runner",

  // A map from regular expressions to paths to transformers
  transform: {},

  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
  // transformIgnorePatterns: [
  //   "/node_modules/",
  //   "\\.pnp\\.[^\\/]+$"
  // ],

  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
  // unmockedModulePathPatterns: undefined,

  // Indicates whether each individual test should be reported during the run
  // verbose: undefined,

  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
  // watchPathIgnorePatterns: [],

  // Whether to use watchman for file crawling
  // watchman: true,
};
33013
api/ordinals/package-lock.json
generated
Normal file
File diff suppressed because it is too large
74
api/ordinals/package.json
Normal file
@@ -0,0 +1,74 @@
{
  "name": "@hirosystems/ordinals-api",
  "description": "A microservice that indexes Bitcoin Ordinal inscription data and exposes it via REST API endpoints.",
  "version": "1.0.0",
  "main": "index.js",
  "author": "Hiro Systems PBC <engineering@hiro.so> (https://hiro.so)",
  "license": "Apache 2.0",
  "scripts": {
    "build": "rimraf ./dist && tsc --project tsconfig.build.json",
    "start": "node dist/src/index.js",
    "start-ts": "ts-node ./src/index.ts",
    "test": "jest --runInBand",
    "test:brc-20": "npm run test -- ./tests/brc-20/",
    "test:api": "npm run test -- ./tests/api/",
    "lint:eslint": "eslint . --ext .js,.jsx,.ts,.tsx -f unix",
    "lint:prettier": "prettier --check src/**/*.ts tests/**/*.ts",
    "lint:unused-exports": "ts-unused-exports tsconfig.json --showLineNumber --excludePathsFromReport=util/*",
    "generate:openapi": "rimraf ./tmp && node -r ts-node/register ./util/openapi-generator.ts",
    "generate:docs": "redoc-cli build --output ./tmp/index.html ./tmp/openapi.yaml",
    "generate:git-info": "rimraf .git-info && node_modules/.bin/api-toolkit-git-info",
    "generate:vercel": "npm run generate:git-info && npm run generate:openapi && npm run generate:docs",
    "testenv:run": "docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml up",
    "testenv:stop": "docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml down -v -t 0",
    "testenv:logs": "docker compose -f ../../dockerfiles/docker-compose.dev.postgres.yml logs -t -f",
    "generate:client:typescript": "swagger-codegen generate -i ./tmp/openapi.yaml -l typescript-fetch -o ./client/typescript -c ./client/typescript.json"
  },
  "prettier": "@stacks/prettier-config",
  "devDependencies": {
    "@commitlint/cli": "^17.4.3",
    "@commitlint/config-conventional": "^17.4.3",
    "@stacks/eslint-config": "^1.2.0",
    "@types/jest": "^29.2.4",
    "@types/supertest": "^2.0.12",
    "@typescript-eslint/eslint-plugin": "^5.46.1",
    "@typescript-eslint/parser": "^5.51.0",
    "@semantic-release/changelog": "^6.0.3",
    "@semantic-release/commit-analyzer": "^10.0.4",
    "@semantic-release/git": "^10.0.1",
    "babel-jest": "^29.3.1",
    "conventional-changelog-conventionalcommits": "^6.1.0",
    "eslint": "^8.29.0",
    "eslint-plugin-prettier": "^4.2.1",
    "eslint-plugin-tsdoc": "^0.2.17",
    "husky": "^8.0.3",
    "jest": "^29.3.1",
    "prettier": "^2.8.1",
    "redoc-cli": "^0.13.20",
    "rimraf": "^3.0.2",
    "semantic-release": "^24.2.1",
    "semantic-release-monorepo": "^8.0.2",
    "ts-jest": "^29.0.3",
    "ts-node": "^10.8.2",
    "ts-unused-exports": "^10.0.1",
    "typescript": "^4.7.4"
  },
  "dependencies": {
    "@fastify/cors": "^8.0.0",
    "@fastify/formbody": "^7.0.1",
    "@fastify/multipart": "^7.1.0",
    "@fastify/swagger": "^8.3.1",
    "@fastify/type-provider-typebox": "^3.2.0",
    "@hirosystems/api-toolkit": "^1.7.2",
    "@hirosystems/chainhook-client": "^1.12.0",
    "@types/node": "^18.13.0",
    "bignumber.js": "^9.1.1",
    "bitcoinjs-lib": "^6.1.0",
    "env-schema": "^5.2.0",
    "fastify": "^4.3.0",
    "fastify-metrics": "^10.2.0",
    "pino": "^8.10.0",
    "postgres": "^3.3.4",
    "undici": "^5.8.0"
  }
}
16
api/ordinals/src/@types/fastify/index.d.ts
vendored
Normal file
@@ -0,0 +1,16 @@
import fastify from 'fastify';
import { PgStore } from '../../pg/pg-store';
import { Brc20PgStore } from '../../pg/brc20/brc20-pg-store';

declare module 'fastify' {
  export interface FastifyInstance<
    HttpServer = Server,
    HttpRequest = IncomingMessage,
    HttpResponse = ServerResponse,
    Logger = FastifyLoggerInstance,
    TypeProvider = FastifyTypeProviderDefault
  > {
    db: PgStore;
    brc20Db: Brc20PgStore;
  }
}
61
api/ordinals/src/api/init.ts
Normal file
@@ -0,0 +1,61 @@
import FastifyCors from '@fastify/cors';
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { PINO_LOGGER_CONFIG, isProdEnv } from '@hirosystems/api-toolkit';
import Fastify, { FastifyPluginAsync } from 'fastify';
import FastifyMetrics, { IFastifyMetrics } from 'fastify-metrics';
import { Server } from 'http';
import { PgStore } from '../pg/pg-store';
import { Brc20Routes } from './routes/brc20';
import { InscriptionsRoutes } from './routes/inscriptions';
import { SatRoutes } from './routes/sats';
import { StatsRoutes } from './routes/stats';
import { StatusRoutes } from './routes/status';
import { Brc20PgStore } from '../pg/brc20/brc20-pg-store';

export const Api: FastifyPluginAsync<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = async fastify => {
  await fastify.register(StatusRoutes);
  await fastify.register(InscriptionsRoutes);
  await fastify.register(SatRoutes);
  await fastify.register(StatsRoutes);
  await fastify.register(Brc20Routes);
};

export async function buildApiServer(args: { db: PgStore; brc20Db: Brc20PgStore }) {
  const fastify = Fastify({
    trustProxy: true,
    logger: PINO_LOGGER_CONFIG,
  }).withTypeProvider<TypeBoxTypeProvider>();

  fastify.decorate('db', args.db);
  fastify.decorate('brc20Db', args.brc20Db);
  if (isProdEnv) {
    await fastify.register(FastifyMetrics, { endpoint: null });
  }
  await fastify.register(FastifyCors);
  await fastify.register(Api, { prefix: '/ordinals/v1' });
  await fastify.register(Api, { prefix: '/ordinals' });

  return fastify;
}

export async function buildPromServer(args: { metrics: IFastifyMetrics }) {
  const promServer = Fastify({
    trustProxy: true,
    logger: PINO_LOGGER_CONFIG,
  });

  promServer.route({
    url: '/metrics',
    method: 'GET',
    logLevel: 'info',
    handler: async (_, reply) => {
      await reply.type('text/plain').send(await args.metrics.client.register.metrics());
    },
  });

  return promServer;
}
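For context, a minimal sketch of how `buildApiServer` might be wired up at startup; the `connect` factory methods and the host/port values below are assumptions for illustration, not part of this diff:

// Hypothetical bootstrap sketch; the store constructors and port are assumed.
import { buildApiServer } from './api/init';
import { PgStore } from './pg/pg-store';
import { Brc20PgStore } from './pg/brc20/brc20-pg-store';

async function main() {
  const db = await PgStore.connect(); // assumed factory method
  const brc20Db = await Brc20PgStore.connect(); // assumed factory method
  const fastify = await buildApiServer({ db, brc20Db });
  // Serves /ordinals/v1/* and the unversioned /ordinals/* alias registered above.
  await fastify.listen({ host: '0.0.0.0', port: 3000 });
}

void main();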
241
api/ordinals/src/api/routes/brc20.ts
Normal file
@@ -0,0 +1,241 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { Value } from '@sinclair/typebox/value';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import {
  AddressParam,
  BlockHeightParam,
  Brc20TokensOrderByParam,
  Brc20ActivityResponseSchema,
  Brc20BalanceResponseSchema,
  Brc20HolderResponseSchema,
  Brc20OperationsParam,
  Brc20TickerParam,
  Brc20TickersParam,
  Brc20TokenDetailsSchema,
  Brc20TokenResponseSchema,
  LimitParam,
  NotFoundResponse,
  OffsetParam,
  PaginatedResponse,
} from '../schemas';
import { handleInscriptionTransfersCache } from '../util/cache';
import {
  DEFAULT_API_LIMIT,
  parseBrc20Activities,
  parseBrc20Balances,
  parseBrc20Holders,
  parseBrc20Supply,
  parseBrc20Tokens,
} from '../util/helpers';

export const Brc20Routes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  fastify.addHook('preHandler', handleInscriptionTransfersCache);

  fastify.get(
    '/brc-20/tokens',
    {
      schema: {
        operationId: 'getBrc20Tokens',
        summary: 'BRC-20 Tokens',
        description: 'Retrieves information for BRC-20 tokens',
        tags: ['BRC-20'],
        querystring: Type.Object({
          ticker: Type.Optional(Brc20TickersParam),
          // Sorting
          order_by: Type.Optional(Brc20TokensOrderByParam),
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(Brc20TokenResponseSchema, 'Paginated BRC-20 Token Response'),
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const response = await fastify.brc20Db.getTokens({
        limit,
        offset,
        ticker: request.query.ticker,
        order_by: request.query.order_by,
      });
      await reply.send({
        limit,
        offset,
        total: response.total,
        results: parseBrc20Tokens(response.results),
      });
    }
  );

  fastify.get(
    '/brc-20/tokens/:ticker',
    {
      schema: {
        operationId: 'getBrc20TokenDetails',
        summary: 'BRC-20 Token Details',
        description: 'Retrieves information for a BRC-20 token including supply and holders',
        tags: ['BRC-20'],
        params: Type.Object({
          ticker: Brc20TickerParam,
        }),
        response: {
          200: Brc20TokenDetailsSchema,
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const token = await fastify.brc20Db.getToken({ ticker: request.params.ticker });
      if (!token) {
        await reply.code(404).send(Value.Create(NotFoundResponse));
      } else {
        await reply.send({
          token: parseBrc20Tokens([token])[0],
          supply: parseBrc20Supply(token),
        });
      }
    }
  );

  fastify.get(
    '/brc-20/tokens/:ticker/holders',
    {
      schema: {
        operationId: 'getBrc20TokenHolders',
        summary: 'BRC-20 Token Holders',
        description: 'Retrieves a list of holders and their balances for a BRC-20 token',
        tags: ['BRC-20'],
        params: Type.Object({
          ticker: Brc20TickerParam,
        }),
        querystring: Type.Object({
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(Brc20HolderResponseSchema, 'Paginated BRC-20 Holders Response'),
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const holders = await fastify.brc20Db.getTokenHolders({
        limit,
        offset,
        ticker: request.params.ticker,
      });
      if (!holders) {
        await reply.code(404).send(Value.Create(NotFoundResponse));
        return;
      }
      await reply.send({
        limit,
        offset,
        total: holders.total,
        results: parseBrc20Holders(holders.results),
      });
    }
  );

  fastify.get(
    '/brc-20/balances/:address',
    {
      schema: {
        operationId: 'getBrc20Balances',
        summary: 'BRC-20 Balances',
        description: 'Retrieves BRC-20 token balances for a Bitcoin address',
        tags: ['BRC-20'],
        params: Type.Object({
          address: AddressParam,
        }),
        querystring: Type.Object({
          ticker: Type.Optional(Brc20TickersParam),
          block_height: Type.Optional(BlockHeightParam),
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(Brc20BalanceResponseSchema, 'Paginated BRC-20 Balance Response'),
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const balances = await fastify.brc20Db.getBalances({
        limit,
        offset,
        address: request.params.address,
        ticker: request.query.ticker,
        block_height: request.query.block_height ? parseInt(request.query.block_height) : undefined,
      });
      await reply.send({
        limit,
        offset,
        total: balances.total,
        results: parseBrc20Balances(balances.results),
      });
    }
  );

  fastify.get(
    '/brc-20/activity',
    {
      schema: {
        operationId: 'getBrc20Activity',
        summary: 'BRC-20 Activity',
        description:
          'Retrieves BRC-20 activity filtered by ticker, address, operation, or at a specific block height',
        tags: ['BRC-20'],
        querystring: Type.Object({
          ticker: Type.Optional(Brc20TickersParam),
          block_height: Type.Optional(BlockHeightParam),
          operation: Type.Optional(Brc20OperationsParam),
          address: Type.Optional(AddressParam),
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(Brc20ActivityResponseSchema, 'Paginated BRC-20 Activity Response'),
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const activity = await fastify.brc20Db.getActivity(
        { limit, offset },
        {
          ticker: request.query.ticker,
          block_height: request.query.block_height
            ? parseInt(request.query.block_height)
            : undefined,
          operation: request.query.operation,
          address: request.query.address,
        }
      );
      await reply.send({
        limit,
        offset,
        total: activity.total,
        results: parseBrc20Activities(activity.results),
      });
    }
  );

  done();
};
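A quick way to exercise these BRC-20 endpoints once the server is listening is with `undici`, which is already a runtime dependency; the base URL below is an assumption:

// Illustrative client-side check; the base URL is an assumption.
import { request } from 'undici';

async function listTopBrc20Tokens() {
  const { body } = await request(
    'http://localhost:3000/ordinals/v1/brc-20/tokens?limit=5&order_by=tx_count'
  );
  // Shape follows PaginatedResponse(Brc20TokenResponseSchema) above.
  const page = (await body.json()) as { total: number; results: { ticker: string }[] };
  console.log(page.total, page.results.map(r => r.ticker));
}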
312
api/ordinals/src/api/routes/inscriptions.ts
Normal file
@@ -0,0 +1,312 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { Value } from '@sinclair/typebox/value';
import { FastifyPluginAsync, FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import {
  AddressesParam,
  BlockHeightParam,
  BlockInscriptionTransferSchema,
  BlockParam,
  CursedParam,
  InscriptionIdParamCType,
  InscriptionIdentifierParam,
  InscriptionIdsParam,
  InscriptionLocationResponseSchema,
  InscriptionNumberParam,
  InscriptionNumbersParam,
  InscriptionResponse,
  LimitParam,
  MimeTypesParam,
  NotFoundResponse,
  OffsetParam,
  Order,
  OrderBy,
  OrderByParam,
  OrderParam,
  OrdinalParam,
  OutputParam,
  PaginatedResponse,
  RecursiveParam,
  SatoshiRaritiesParam,
  TimestampParam,
} from '../schemas';
import { handleInscriptionCache, handleInscriptionTransfersCache } from '../util/cache';
import {
  DEFAULT_API_LIMIT,
  blockParam,
  hexToBuffer,
  parseBlockTransfers,
  parseDbInscription,
  parseDbInscriptions,
  parseInscriptionLocations,
} from '../util/helpers';

function inscriptionIdArrayParam(param: string | number) {
  return InscriptionIdParamCType.Check(param) ? { genesis_id: [param] } : { number: [param] };
}

function inscriptionIdParam(param: string | number) {
  return InscriptionIdParamCType.Check(param) ? { genesis_id: param } : { number: param };
}

function bigIntParam(param: number | undefined) {
  return param ? BigInt(param) : undefined;
}

const IndexRoutes: FastifyPluginCallback<Record<never, never>, Server, TypeBoxTypeProvider> = (
  fastify,
  options,
  done
) => {
  fastify.addHook('preHandler', handleInscriptionTransfersCache);
  fastify.get(
    '/inscriptions',
    {
      schema: {
        operationId: 'getInscriptions',
        summary: 'List of Inscriptions',
        description: 'Retrieves a list of inscriptions with options to filter and sort results',
        tags: ['Inscriptions'],
        querystring: Type.Object({
          genesis_block: Type.Optional(BlockParam),
          from_genesis_block_height: Type.Optional(BlockHeightParam),
          to_genesis_block_height: Type.Optional(BlockHeightParam),
          from_genesis_timestamp: Type.Optional(TimestampParam),
          to_genesis_timestamp: Type.Optional(TimestampParam),
          from_sat_ordinal: Type.Optional(OrdinalParam),
          to_sat_ordinal: Type.Optional(OrdinalParam),
          from_sat_coinbase_height: Type.Optional(BlockHeightParam),
          to_sat_coinbase_height: Type.Optional(BlockHeightParam),
          from_number: Type.Optional(InscriptionNumberParam),
          to_number: Type.Optional(InscriptionNumberParam),
          id: Type.Optional(InscriptionIdsParam),
          number: Type.Optional(InscriptionNumbersParam),
          output: Type.Optional(OutputParam),
          address: Type.Optional(AddressesParam),
          genesis_address: Type.Optional(AddressesParam),
          mime_type: Type.Optional(MimeTypesParam),
          rarity: Type.Optional(SatoshiRaritiesParam),
          recursive: Type.Optional(RecursiveParam),
          cursed: Type.Optional(CursedParam),
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
          // Ordering
          order_by: Type.Optional(OrderByParam),
          order: Type.Optional(OrderParam),
        }),
        response: {
          200: PaginatedResponse(InscriptionResponse, 'Paginated Inscriptions Response'),
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const inscriptions = await fastify.db.getInscriptions(
        { limit, offset },
        {
          ...blockParam(request.query.genesis_block, 'genesis_block'),
          ...blockParam(request.query.from_genesis_block_height, 'from_genesis_block'),
          ...blockParam(request.query.to_genesis_block_height, 'to_genesis_block'),
          ...blockParam(request.query.from_sat_coinbase_height, 'from_sat_coinbase'),
          ...blockParam(request.query.to_sat_coinbase_height, 'to_sat_coinbase'),
          from_genesis_timestamp: request.query.from_genesis_timestamp,
          to_genesis_timestamp: request.query.to_genesis_timestamp,
          from_sat_ordinal: bigIntParam(request.query.from_sat_ordinal),
          to_sat_ordinal: bigIntParam(request.query.to_sat_ordinal),
          from_number: request.query.from_number,
          to_number: request.query.to_number,
          genesis_id: request.query.id,
          number: request.query.number,
          output: request.query.output,
          address: request.query.address,
          genesis_address: request.query.genesis_address,
          mime_type: request.query.mime_type,
          sat_rarity: request.query.rarity,
          recursive: request.query.recursive,
          cursed: request.query.cursed,
        },
        {
          order_by: request.query.order_by ?? OrderBy.genesis_block_height,
          order: request.query.order ?? Order.desc,
        }
      );
      await reply.send({
        limit,
        offset,
        total: inscriptions.total,
        results: parseDbInscriptions(inscriptions.results),
      });
    }
  );

  fastify.get(
    '/inscriptions/transfers',
    {
      schema: {
        operationId: 'getTransfersPerBlock',
        summary: 'Transfers per block',
        description:
          'Retrieves a list of inscription transfers that occurred at a specific Bitcoin block',
        tags: ['Inscriptions'],
        querystring: Type.Object({
          block: BlockParam,
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(
            BlockInscriptionTransferSchema,
            'Paginated Block Transfers Response'
          ),
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const transfers = await fastify.db.getTransfersPerBlock({
        limit,
        offset,
        ...blockParam(request.query.block, 'block'),
      });
      await reply.send({
        limit,
        offset,
        total: transfers.total,
        results: parseBlockTransfers(transfers.results),
      });
    }
  );

  done();
};

const ShowRoutes: FastifyPluginCallback<Record<never, never>, Server, TypeBoxTypeProvider> = (
  fastify,
  options,
  done
) => {
  fastify.addHook('preHandler', handleInscriptionCache);
  fastify.get(
    '/inscriptions/:id',
    {
      schema: {
        operationId: 'getInscription',
        summary: 'Specific Inscription',
        description: 'Retrieves a single inscription',
        tags: ['Inscriptions'],
        params: Type.Object({
          id: InscriptionIdentifierParam,
        }),
        response: {
          200: InscriptionResponse,
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const inscription = await fastify.db.getInscriptions(
        { limit: 1, offset: 0 },
        { ...inscriptionIdArrayParam(request.params.id) }
      );
      if (inscription.total > 0) {
        await reply.send(parseDbInscription(inscription.results[0]));
      } else {
        await reply.code(404).send(Value.Create(NotFoundResponse));
      }
    }
  );

  fastify.get(
    '/inscriptions/:id/content',
    {
      schema: {
        operationId: 'getInscriptionContent',
        summary: 'Inscription content',
        description: 'Retrieves the contents of a single inscription',
        tags: ['Inscriptions'],
        params: Type.Object({
          id: InscriptionIdentifierParam,
        }),
        response: {
          200: Type.Uint8Array(),
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const inscription = await fastify.db.getInscriptionContent(
        inscriptionIdParam(request.params.id)
      );
      if (inscription) {
        const bytes = hexToBuffer(inscription.content);
        await reply
          .headers({
            'content-type': inscription.content_type,
            'content-length': inscription.content_length,
          })
          .send(bytes);
      } else {
        await reply.code(404).send(Value.Create(NotFoundResponse));
      }
    }
  );

  fastify.get(
    '/inscriptions/:id/transfers',
    {
      schema: {
        operationId: 'getInscriptionTransfers',
        summary: 'Inscription transfers',
        description: 'Retrieves all transfers for a single inscription',
        tags: ['Inscriptions'],
        params: Type.Object({
          id: InscriptionIdentifierParam,
        }),
        querystring: Type.Object({
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(
            InscriptionLocationResponseSchema,
            'Paginated Inscription Locations Response'
          ),
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const locations = await fastify.db.getInscriptionLocations({
        ...inscriptionIdParam(request.params.id),
        limit,
        offset,
      });
      await reply.send({
        limit,
        offset,
        total: locations.total,
        results: parseInscriptionLocations(locations.results),
      });
    }
  );

  done();
};

export const InscriptionsRoutes: FastifyPluginAsync<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = async fastify => {
  await fastify.register(IndexRoutes);
  await fastify.register(ShowRoutes);
};
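Because `buildApiServer` returns a plain Fastify instance, these routes can be smoke-tested without opening a socket via Fastify's built-in `inject`; in this sketch the `db` and `brc20Db` arguments are assumed to be already-connected stores or test doubles:

// Minimal sketch using Fastify's inject(); the stores are assumed inputs.
import { buildApiServer } from '../init';
import { PgStore } from '../../pg/pg-store';
import { Brc20PgStore } from '../../pg/brc20/brc20-pg-store';

async function checkInscriptionRoute(db: PgStore, brc20Db: Brc20PgStore) {
  const fastify = await buildApiServer({ db, brc20Db });
  const response = await fastify.inject({
    method: 'GET',
    url: '/ordinals/v1/inscriptions/38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
  });
  console.log(response.statusCode, response.json()); // 200 with an Inscription Response, or 404
  await fastify.close();
}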
113
api/ordinals/src/api/routes/sats.ts
Normal file
@@ -0,0 +1,113 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import {
  InscriptionResponse,
  InvalidSatoshiNumberResponse,
  LimitParam,
  OffsetParam,
  OrdinalParam,
  PaginatedResponse,
  SatoshiResponse,
} from '../schemas';
import { OrdinalSatoshi } from '../util/ordinal-satoshi';
import { DEFAULT_API_LIMIT, parseDbInscriptions } from '../util/helpers';

export const SatRoutes: FastifyPluginCallback<Record<never, never>, Server, TypeBoxTypeProvider> = (
  fastify,
  options,
  done
) => {
  fastify.get(
    '/sats/:ordinal',
    {
      schema: {
        operationId: 'getSatoshi',
        summary: 'Satoshi Ordinal',
        description: 'Retrieves ordinal information for a single satoshi',
        tags: ['Satoshis'],
        params: Type.Object({
          ordinal: OrdinalParam,
        }),
        response: {
          200: SatoshiResponse,
          400: InvalidSatoshiNumberResponse,
        },
      },
    },
    async (request, reply) => {
      let sat: OrdinalSatoshi;
      try {
        sat = new OrdinalSatoshi(request.params.ordinal);
      } catch (error) {
        await reply.code(400).send({ error: 'Invalid satoshi ordinal number' });
        return;
      }
      const inscriptions = await fastify.db.getInscriptions(
        { limit: 1, offset: 0 },
        { sat_ordinal: BigInt(request.params.ordinal) }
      );
      await reply.send({
        coinbase_height: sat.blockHeight,
        cycle: sat.cycle,
        epoch: sat.epoch,
        period: sat.period,
        offset: sat.offset,
        decimal: sat.decimal,
        degree: sat.degree,
        name: sat.name,
        rarity: sat.rarity,
        percentile: sat.percentile,
        inscription_id: inscriptions.results[0]?.genesis_id,
      });
    }
  );

  fastify.get(
    '/sats/:ordinal/inscriptions',
    {
      schema: {
        operationId: 'getSatoshiInscriptions',
        summary: 'Satoshi Inscriptions',
        description: 'Retrieves all inscriptions associated with a single satoshi',
        tags: ['Satoshis'],
        params: Type.Object({
          ordinal: OrdinalParam,
        }),
        querystring: Type.Object({
          // Pagination
          offset: Type.Optional(OffsetParam),
          limit: Type.Optional(LimitParam),
        }),
        response: {
          200: PaginatedResponse(InscriptionResponse, 'Paginated Satoshi Inscriptions Response'),
          400: InvalidSatoshiNumberResponse,
        },
      },
    },
    async (request, reply) => {
      let sat: OrdinalSatoshi;
      try {
        sat = new OrdinalSatoshi(request.params.ordinal);
      } catch (error) {
        await reply.code(400).send({ error: 'Invalid satoshi ordinal number' });
        return;
      }
      const limit = request.query.limit ?? DEFAULT_API_LIMIT;
      const offset = request.query.offset ?? 0;
      const inscriptions = await fastify.db.getInscriptions(
        { limit, offset },
        { sat_ordinal: BigInt(sat.ordinal) }
      );
      await reply.send({
        limit,
        offset,
        total: inscriptions.total,
        results: parseDbInscriptions(inscriptions.results),
      });
    }
  );

  done();
};
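These handlers lean on `OrdinalSatoshi` validating its input in the constructor. A rough sketch of that contract as it reads from the call sites above; the field names come from the response mapping, everything else is an assumption:

// Sketch of the usage contract inferred from the handlers above; not the class itself.
import { OrdinalSatoshi } from '../util/ordinal-satoshi';

function describeSat(ordinal: number): string | undefined {
  let sat: OrdinalSatoshi;
  try {
    sat = new OrdinalSatoshi(ordinal); // throws on out-of-range ordinals, hence the 400 above
  } catch {
    return undefined;
  }
  return `${sat.name} (${sat.rarity}) mined at height ${sat.blockHeight}`;
}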
52
api/ordinals/src/api/routes/stats.ts
Normal file
@@ -0,0 +1,52 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Type } from '@sinclair/typebox';
import { FastifyPluginAsync, FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import { BlockHeightParam, InscriptionsPerBlockResponse, NotFoundResponse } from '../schemas';
import { handleInscriptionsPerBlockCache } from '../util/cache';
import { blockParam } from '../util/helpers';

const IndexRoutes: FastifyPluginCallback<Record<never, never>, Server, TypeBoxTypeProvider> = (
  fastify,
  options,
  done
) => {
  fastify.addHook('preHandler', handleInscriptionsPerBlockCache);
  fastify.get(
    '/stats/inscriptions',
    {
      schema: {
        operationId: 'getStatsInscriptionCount',
        summary: 'Inscription Count per Block',
        description: 'Retrieves statistics on the number of inscriptions revealed per block',
        tags: ['Statistics'],
        querystring: Type.Object({
          from_block_height: Type.Optional(BlockHeightParam),
          to_block_height: Type.Optional(BlockHeightParam),
        }),
        response: {
          200: InscriptionsPerBlockResponse,
          404: NotFoundResponse,
        },
      },
    },
    async (request, reply) => {
      const inscriptions = await fastify.db.counts.getInscriptionCountPerBlock({
        ...blockParam(request.query.from_block_height, 'from_block'),
        ...blockParam(request.query.to_block_height, 'to_block'),
      });
      await reply.send({
        results: inscriptions,
      });
    }
  );
  done();
};

export const StatsRoutes: FastifyPluginAsync<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = async fastify => {
  await fastify.register(IndexRoutes);
};
44
api/ordinals/src/api/routes/status.ts
Normal file
@@ -0,0 +1,44 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { FastifyPluginCallback } from 'fastify';
import { Server } from 'http';
import { ApiStatusResponse } from '../schemas';
import { SERVER_VERSION } from '@hirosystems/api-toolkit';
import { handleInscriptionTransfersCache } from '../util/cache';

export const StatusRoutes: FastifyPluginCallback<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = (fastify, options, done) => {
  fastify.addHook('preHandler', handleInscriptionTransfersCache);
  fastify.get(
    '/',
    {
      schema: {
        operationId: 'getApiStatus',
        summary: 'API Status',
        description: 'Displays the status of the API',
        tags: ['Status'],
        response: {
          200: ApiStatusResponse,
        },
      },
    },
    async (request, reply) => {
      const result = await fastify.db.sqlTransaction(async sql => {
        const block_height = await fastify.db.getChainTipBlockHeight();
        const max_inscription_number = await fastify.db.getMaxInscriptionNumber();
        const max_cursed_inscription_number = await fastify.db.getMaxCursedInscriptionNumber();
        return {
          server_version: `ordinals-api ${SERVER_VERSION.tag} (${SERVER_VERSION.branch}:${SERVER_VERSION.commit})`,
          status: 'ready',
          block_height,
          max_inscription_number,
          max_cursed_inscription_number,
        };
      });
      await reply.send(result);
    }
  );
  done();
};
563
api/ordinals/src/api/schemas.ts
Normal file
@@ -0,0 +1,563 @@
import { SwaggerOptions } from '@fastify/swagger';
import { SERVER_VERSION } from '@hirosystems/api-toolkit';
import { Static, TSchema, Type } from '@sinclair/typebox';
import { TypeCompiler } from '@sinclair/typebox/compiler';
import { SAT_SUPPLY, SatoshiRarity } from './util/ordinal-satoshi';

export const OpenApiSchemaOptions: SwaggerOptions = {
  openapi: {
    info: {
      title: 'Ordinals API',
      description: `
The [Ordinals API](https://docs.hiro.so/ordinals-api) is a service that indexes Bitcoin Ordinals data and exposes it via REST API endpoints.

Here are the key features of the Ordinals API:

- **Ordinal Inscription Ingestion**: The Ordinals API helps with the complete ingestion of ordinal inscriptions. Using our endpoints, you can retrieve the metadata for a particular inscription, all inscriptions held by a particular address, trading activity for inscriptions, and more.

- **BRC-20 Support**: The Ordinals API also offers support for BRC-20 tokens, a fungible token standard built on top of ordinal theory. Retrieve data for a particular BRC-20 token, a user's BRC-20 holdings, marketplace activity, and more.

- **REST JSON Endpoints with ETag Caching**: The Ordinals API provides easy-to-use REST endpoints that return responses in JSON format. It also supports ETag caching, which allows you to cache responses based on inscriptions. This helps optimize performance and reduce unnecessary requests.

The source code for this project is available in our [GitHub repository](https://github.com/hirosystems/ordhook). You can explore the codebase, contribute, and raise issues or pull requests.
      `,
      version: SERVER_VERSION.tag,
    },
    externalDocs: {
      url: 'https://github.com/hirosystems/ordhook',
      description: 'Source Repository',
    },
    servers: [
      {
        url: 'https://api.hiro.so/',
        description: 'mainnet',
      },
    ],
    tags: [
      {
        name: 'Inscriptions',
        description: 'Endpoints to query ordinal inscriptions',
      },
      {
        name: 'Satoshis',
        description: 'Endpoints to query Satoshi ordinal and rarity information',
      },
      {
        name: 'BRC-20',
        description: 'Endpoints to query BRC-20 token balances and events',
      },
      {
        name: 'Statistics',
        description: 'Endpoints to query statistics on ordinal inscription data',
      },
    ],
  },
};

// ==========================
// Parameters
// ==========================

const Nullable = <T extends TSchema>(type: T) => Type.Union([type, Type.Null()]);

export const AddressParam = Type.String({
  title: 'Address',
  description: 'Bitcoin address',
  examples: ['bc1p8aq8s3z9xl87e74twfk93mljxq6alv4a79yheadx33t9np4g2wkqqt8kc5'],
});

export const AddressesParam = Type.Array(AddressParam, {
  title: 'Addresses',
  description: 'Array of Bitcoin addresses',
  examples: [
    [
      'bc1p8aq8s3z9xl87e74twfk93mljxq6alv4a79yheadx33t9np4g2wkqqt8kc5',
      'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj',
    ],
  ],
});

export const Brc20TickerParam = Type.String();

export const Brc20TickersParam = Type.Array(Brc20TickerParam);

const InscriptionIdParam = Type.RegEx(/^[a-fA-F0-9]{64}i[0-9]+$/, {
  title: 'Inscription ID',
  description: 'Inscription ID',
  examples: ['38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0'],
});
export const InscriptionIdParamCType = TypeCompiler.Compile(InscriptionIdParam);

export const InscriptionIdsParam = Type.Array(InscriptionIdParam, {
  title: 'Inscription IDs',
  description: 'Array of inscription IDs',
  examples: [
    [
      '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      'e3af144354367de58c675e987febcb49f17d6c19e645728b833fe95408feab85i0',
    ],
  ],
});

export const InscriptionNumberParam = Type.Integer({
  title: 'Inscription Number',
  description: 'Inscription number',
  examples: ['10500'],
});
export const InscriptionNumberParamCType = TypeCompiler.Compile(InscriptionNumberParam);

export const InscriptionNumbersParam = Type.Array(InscriptionNumberParam, {
  title: 'Inscription Numbers',
  description: 'Array of inscription numbers',
  examples: [['10500', '65']],
});

export const InscriptionIdentifierParam = Type.Union([InscriptionIdParam, InscriptionNumberParam], {
  title: 'Inscription Identifier',
  description: 'Inscription unique identifier (number or ID)',
  examples: ['145000', '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0'],
});

export const OrdinalParam = Type.Integer({
  title: 'Ordinal Number',
  description: 'Ordinal number that uniquely identifies a satoshi',
  examples: [257418248345364],
  minimum: 0,
  exclusiveMaximum: SAT_SUPPLY,
});

export const BlockHeightParam = Type.RegEx(/^[0-9]+$/, {
  title: 'Block Height',
  description: 'Bitcoin block height',
  examples: [777678],
});
export const BlockHeightParamCType = TypeCompiler.Compile(BlockHeightParam);

const BlockHashParam = Type.RegEx(/^[0]{8}[a-fA-F0-9]{56}$/, {
  title: 'Block Hash',
  description: 'Bitcoin block hash',
  examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
});
export const BlockHashParamCType = TypeCompiler.Compile(BlockHashParam);

export const BlockParam = Type.Union([BlockHashParam, BlockHeightParam], {
  title: 'Block Identifier',
  description: 'Bitcoin block identifier (height or hash)',
  examples: [777654, '0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
});

export const MimeTypesParam = Type.Array(
  Type.RegEx(/^\w+\/[-.\w]+(?:\+[-.\w]+)?$/, {
    title: 'MIME Type',
    description: 'MIME type for an inscription content',
    examples: ['image/png'],
  }),
  {
    title: 'MIME Types',
    description: 'Array of inscription MIME types',
    examples: [['image/png', 'image/jpeg']],
  }
);

export const SatoshiRaritiesParam = Type.Array(
  Type.Enum(SatoshiRarity, {
    title: 'Rarity',
    description: 'Rarity of a single satoshi according to Ordinal Theory',
    examples: ['uncommon'],
  }),
  {
    title: 'Rarity',
    description: 'Array of satoshi rarity values',
    examples: [['common', 'uncommon']],
  }
);

export const TimestampParam = Type.Integer({
  title: 'Timestamp',
  description: 'Block UNIX epoch timestamp (milliseconds)',
  examples: [1677731361],
});

export const OutputParam = Type.RegEx(/^[a-fA-F0-9]{64}:[0-9]+$/, {
  title: 'Transaction Output',
  description: 'A UTXO for a Bitcoin transaction',
  examples: ['8f46f0d4ef685e650727e6faf7e30f23b851a7709714ec774f7909b3fb5e604c:0'],
});

export const RecursiveParam = Type.Boolean({
  title: 'Recursive',
  description: 'Whether or not the inscription is recursive',
  examples: [false],
});

export const CursedParam = Type.Boolean({
  title: 'Cursed',
  description: 'Whether or not the inscription is cursed',
  examples: [false],
});

export const OffsetParam = Type.Integer({
  minimum: 0,
  title: 'Offset',
  description: 'Result offset',
});

export const LimitParam = Type.Integer({
  minimum: 1,
  maximum: 60,
  title: 'Limit',
  description: 'Results per page',
});

const Brc20OperationParam = Type.Union(
  [
    Type.Literal('deploy'),
    Type.Literal('mint'),
    Type.Literal('transfer'),
    Type.Literal('transfer_send'),
  ],
  {
    title: 'Operation',
    description:
      'BRC-20 token operation. Note that a BRC-20 transfer is a two-step process: `transfer` (creating the inscription, which makes funds transferrable) and `transfer_send` (sending the inscription to a recipient, which moves the funds)',
    examples: ['deploy', 'mint', 'transfer', 'transfer_send'],
  }
);

export const Brc20OperationsParam = Type.Array(Brc20OperationParam);

export enum Brc20TokenOrderBy {
  tx_count = 'tx_count',
  index = 'index',
}
export const Brc20TokensOrderByParam = Type.Enum(Brc20TokenOrderBy, {
  title: 'Order By',
  description: 'Parameter to order results by',
});

export enum OrderBy {
  number = 'number',
  genesis_block_height = 'genesis_block_height',
  ordinal = 'ordinal',
  rarity = 'rarity',
}
export const OrderByParam = Type.Enum(OrderBy, {
  title: 'Order By',
  description: 'Parameter to order results by',
});

export enum Order {
  asc = 'asc',
  desc = 'desc',
}
export const OrderParam = Type.Enum(Order, {
  title: 'Order',
  description: 'Results order',
});

// ==========================
// Responses
// ==========================

export const PaginatedResponse = <T extends TSchema>(type: T, title: string) =>
  Type.Object(
    {
      limit: Type.Integer({ examples: [20] }),
      offset: Type.Integer({ examples: [0] }),
      total: Type.Integer({ examples: [1] }),
      results: Type.Array(type),
    },
    { title }
  );

export const InscriptionResponse = Type.Object(
  {
    id: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'],
    }),
    number: Type.Integer({ examples: [248751] }),
    address: Nullable(
      Type.String({
        examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
      })
    ),
    genesis_address: Nullable(
      Type.String({
        examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
      })
    ),
    genesis_block_height: Type.Integer({ examples: [778921] }),
    genesis_block_hash: Type.String({
      examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
    }),
    genesis_tx_id: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218'],
    }),
    genesis_fee: Type.String({ examples: ['3179'] }),
    genesis_timestamp: Type.Integer({ examples: [1677733170000] }),
    tx_id: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218'],
    }),
    location: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218:0:0'],
    }),
    output: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218:0'],
    }),
    value: Nullable(Type.String({ examples: ['546'] })),
    offset: Nullable(Type.String({ examples: ['0'] })),
    sat_ordinal: Type.String({ examples: ['1232735286933201'] }),
    sat_rarity: Type.String({ examples: ['common'] }),
    sat_coinbase_height: Type.Integer({ examples: [430521] }),
    mime_type: Type.String({ examples: ['text/plain'] }),
    content_type: Type.String({ examples: ['text/plain;charset=utf-8'] }),
    content_length: Type.Integer({ examples: [59] }),
    timestamp: Type.Integer({ examples: [1677733170000] }),
    curse_type: Nullable(Type.String({ examples: ['p2wsh'] })),
    recursive: Type.Boolean({ examples: [true] }),
    recursion_refs: Nullable(
      Type.Array(
        Type.String({
          examples: [
            '1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0',
            '541076e29e1b63460412d3087b37130c9a14abd0beeb4e9b2b805d2072c84dedi0',
          ],
        })
      )
    ),
    parent: Nullable(
      Type.String({
        examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'],
      })
    ),
    delegate: Nullable(
      Type.String({
        examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'],
      })
    ),
    metadata: Nullable(Type.Any()),
    meta_protocol: Nullable(
      Type.String({
        examples: ['brc20'],
      })
    ),
  },
  { title: 'Inscription Response' }
);
export type InscriptionResponseType = Static<typeof InscriptionResponse>;

export const SatoshiResponse = Type.Object(
  {
    coinbase_height: Type.Integer({ examples: [752860] }),
    cycle: Type.Integer({ examples: [0] }),
    decimal: Type.String({ examples: ['752860.20444193'] }),
    degree: Type.String({ examples: ['0°122860′892″20444193‴'] }),
    inscription_id: Type.Optional(
      Type.String({
        examples: ['ff4503ab9048d6d0ff4e23def81b614d5270d341ce993992e93902ceb0d4ed79i0'],
      })
    ),
    epoch: Type.Number({ examples: [3] }),
    name: Type.String({ examples: ['ahehcbywzae'] }),
    offset: Type.Number({ examples: [20444193] }),
    percentile: Type.String({ examples: ['91.15654869285287%'] }),
    period: Type.Integer({ examples: [373] }),
    rarity: Type.Enum(SatoshiRarity, { examples: ['common'] }),
  },
  { title: 'Satoshi Response' }
);

export const ApiStatusResponse = Type.Object(
  {
    server_version: Type.String({ examples: [''] }),
    status: Type.String(),
    block_height: Type.Optional(Type.Integer()),
    max_inscription_number: Type.Optional(Type.Integer()),
    max_cursed_inscription_number: Type.Optional(Type.Integer()),
  },
  { title: 'Api Status Response' }
);

export const InscriptionLocationResponseSchema = Type.Object(
  {
    block_height: Type.Integer({ examples: [778921] }),
    block_hash: Type.String({
      examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
    }),
    address: Nullable(
      Type.String({
        examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
      })
    ),
    tx_id: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218'],
    }),
    location: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218:0:0'],
    }),
    output: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218:0'],
    }),
    value: Nullable(Type.String({ examples: ['546'] })),
    offset: Nullable(Type.String({ examples: ['0'] })),
    timestamp: Type.Integer({ examples: [1677733170000] }),
  },
  { title: 'Inscription Location Response' }
);
export type InscriptionLocationResponse = Static<typeof InscriptionLocationResponseSchema>;

export const BlockInscriptionTransferSchema = Type.Object({
  id: Type.String({
    examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'],
  }),
  number: Type.Integer({ examples: [248751] }),
  from: InscriptionLocationResponseSchema,
  to: InscriptionLocationResponseSchema,
});
export type BlockInscriptionTransfer = Static<typeof BlockInscriptionTransferSchema>;

export const Brc20BalanceResponseSchema = Type.Object({
  ticker: Type.String({ examples: ['PEPE'] }),
  available_balance: Type.String({ examples: ['1500.00000'] }),
  transferrable_balance: Type.String({ examples: ['500.00000'] }),
  overall_balance: Type.String({ examples: ['2000.00000'] }),
});
export type Brc20BalanceResponse = Static<typeof Brc20BalanceResponseSchema>;

export const Brc20ActivityResponseSchema = Type.Object({
  operation: Type.Union([
    Type.Literal('deploy'),
    Type.Literal('mint'),
    Type.Literal('transfer'),
    Type.Literal('transfer_send'),
  ]),
  ticker: Type.String({ examples: ['PEPE'] }),
  inscription_id: Type.String({
    examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'],
  }),
  block_height: Type.Integer({ examples: [778921] }),
  block_hash: Type.String({
    examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
  }),
  tx_id: Type.String({
    examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218'],
  }),
  location: Type.String({
    examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218:0:0'],
  }),
  address: Type.String({
    examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
  }),
  timestamp: Type.Integer({ examples: [1677733170000] }),
  mint: Type.Optional(
    Type.Object({
      amount: Nullable(Type.String({ examples: ['1000000'] })),
    })
  ),
  deploy: Type.Optional(
    Type.Object({
      max_supply: Type.String({ examples: ['21000000'] }),
      mint_limit: Nullable(Type.String({ examples: ['100000'] })),
      decimals: Type.Integer({ examples: [18] }),
    })
  ),
  transfer: Type.Optional(
    Type.Object({
      amount: Type.String({ examples: ['1000000'] }),
      from_address: Type.String({
        examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
      }),
    })
  ),
  transfer_send: Type.Optional(
    Type.Object({
      amount: Type.String({ examples: ['1000000'] }),
      from_address: Type.String({
        examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
      }),
      to_address: Type.String({
        examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
      }),
    })
  ),
});
export type Brc20ActivityResponse = Static<typeof Brc20ActivityResponseSchema>;

export const Brc20TokenResponseSchema = Type.Object(
  {
    id: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218i0'],
    }),
    number: Type.Integer({ examples: [248751] }),
    block_height: Type.Integer({ examples: [752860] }),
    tx_id: Type.String({
      examples: ['1463d48e9248159084929294f64bda04487503d30ce7ab58365df1dc6fd58218'],
    }),
    address: Type.String({
      examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
    }),
    ticker: Type.String({ examples: ['PEPE'] }),
    max_supply: Type.String({ examples: ['21000000'] }),
    mint_limit: Nullable(Type.String({ examples: ['100000'] })),
    decimals: Type.Integer({ examples: [18] }),
    deploy_timestamp: Type.Integer({ examples: [1677733170000] }),
    minted_supply: Type.String({ examples: ['1000000'] }),
    tx_count: Type.Integer({ examples: [300000] }),
    self_mint: Type.Boolean(),
  },
  { title: 'BRC-20 Token Response' }
);
export type Brc20TokenResponse = Static<typeof Brc20TokenResponseSchema>;

const Brc20SupplySchema = Type.Object({
  max_supply: Type.String({ examples: ['21000000'] }),
  minted_supply: Type.String({ examples: ['1000000'] }),
  holders: Type.Integer({ examples: [240] }),
});
export type Brc20Supply = Static<typeof Brc20SupplySchema>;

export const Brc20HolderResponseSchema = Type.Object({
  address: Type.String({
    examples: ['bc1pvwh2dl6h388x65rqq47qjzdmsqgkatpt4hye6daf7yxvl0z3xjgq247aq8'],
  }),
  overall_balance: Type.String({ examples: ['2000.00000'] }),
});
export type Brc20HolderResponse = Static<typeof Brc20HolderResponseSchema>;

export const Brc20TokenDetailsSchema = Type.Object(
  {
    token: Brc20TokenResponseSchema,
    supply: Brc20SupplySchema,
  },
  { title: 'BRC-20 Token Details Response' }
);
type Brc20TokenDetails = Static<typeof Brc20TokenDetailsSchema>;

export const NotFoundResponse = Type.Object(
  {
    error: Type.Literal('Not found'),
  },
  { title: 'Not Found Response' }
);

export const InvalidSatoshiNumberResponse = Type.Object(
  {
    error: Type.Literal('Invalid satoshi ordinal number'),
  },
  { title: 'Invalid Satoshi Number Response' }
);

const InscriptionsPerBlock = Type.Object({
  block_height: Type.String({ examples: ['778921'] }),
  block_hash: Type.String({
    examples: ['0000000000000000000452773967cdd62297137cdaf79950c5e8bb0c62075133'],
  }),
  inscription_count: Type.String({ examples: ['100'] }),
  inscription_count_accum: Type.String({ examples: ['3100'] }),
  timestamp: Type.Integer({ examples: [1677733170000] }),
});
export const InscriptionsPerBlockResponse = Type.Object({
  results: Type.Array(InscriptionsPerBlock),
});
export type InscriptionsPerBlockResponse = Static<typeof InscriptionsPerBlockResponse>;
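One note on why several params above are exported alongside a `TypeCompiler.Compile(...)` checker: the compiled `*CType` values give fast structural validation outside route schemas, which the cache layer below relies on. An illustrative snippet (not part of the diff):

// Illustrative only; mirrors how the compiled checkers are used in cache.ts below.
const candidate = '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0';
if (InscriptionIdParamCType.Check(candidate)) {
  // Treat it as a genesis inscription ID.
} else if (BlockHeightParamCType.Check(candidate)) {
  // Treat it as a block height string.
}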
87
api/ordinals/src/api/util/cache.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { FastifyReply, FastifyRequest } from 'fastify';
import { InscriptionIdParamCType, InscriptionNumberParamCType } from '../schemas';
import { CACHE_CONTROL_MUST_REVALIDATE, parseIfNoneMatchHeader } from '@hirosystems/api-toolkit';

enum ETagType {
  inscriptionsIndex,
  inscription,
  inscriptionsPerBlock,
}

export async function handleInscriptionCache(request: FastifyRequest, reply: FastifyReply) {
  return handleCache(ETagType.inscription, request, reply);
}

export async function handleInscriptionTransfersCache(
  request: FastifyRequest,
  reply: FastifyReply
) {
  return handleCache(ETagType.inscriptionsIndex, request, reply);
}

export async function handleInscriptionsPerBlockCache(
  request: FastifyRequest,
  reply: FastifyReply
) {
  return handleCache(ETagType.inscriptionsPerBlock, request, reply);
}

async function handleCache(type: ETagType, request: FastifyRequest, reply: FastifyReply) {
  const ifNoneMatch = parseIfNoneMatchHeader(request.headers['if-none-match']);
  let etag: string | undefined;
  switch (type) {
    case ETagType.inscription:
      etag = await getInscriptionLocationEtag(request);
      break;
    case ETagType.inscriptionsIndex:
      etag = await getInscriptionsIndexEtag(request);
      break;
    case ETagType.inscriptionsPerBlock:
      etag = await request.server.db.getInscriptionsPerBlockETag();
      break;
  }
  if (etag) {
    if (ifNoneMatch && ifNoneMatch.includes(etag)) {
      await reply.header('Cache-Control', CACHE_CONTROL_MUST_REVALIDATE).code(304).send();
    } else {
      void reply.headers({ 'Cache-Control': CACHE_CONTROL_MUST_REVALIDATE, ETag: `"${etag}"` });
    }
  }
}

/**
 * Retrieve the inscription's location timestamp as a UNIX epoch so we can use it as the response
 * ETag.
 * @param request - Fastify request
 * @returns ETag string
 */
async function getInscriptionLocationEtag(request: FastifyRequest): Promise<string | undefined> {
  try {
    const components = request.url.split('/');
    do {
      const lastElement = components.pop();
      if (lastElement && lastElement.length) {
        if (InscriptionIdParamCType.Check(lastElement)) {
          return await request.server.db.getInscriptionETag({ genesis_id: lastElement });
        } else if (InscriptionNumberParamCType.Check(parseInt(lastElement))) {
          return await request.server.db.getInscriptionETag({ number: lastElement });
        }
      }
    } while (components.length);
  } catch (error) {
    return;
  }
}

/**
 * Get an ETag based on the last state of all inscriptions.
 * @param request - Fastify request
 * @returns ETag string
 */
async function getInscriptionsIndexEtag(request: FastifyRequest): Promise<string | undefined> {
  try {
    return await request.server.db.getInscriptionsIndexETag();
  } catch (error) {
    return;
  }
}
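A sketch of how these handlers are typically wired, assuming the usual Fastify pattern of running them as a preHandler on a built server instance (route path and handler body are illustrative, not from this diff):

fastify.get(
  '/ordinals/v1/inscriptions/:id',
  { preHandler: handleInscriptionCache },
  async (request, reply) => {
    // The preHandler has already replied 304 if the client sent an
    // If-None-Match header matching the inscription's location timestamp;
    // otherwise it attached the ETag header and the full payload is sent here.
  }
);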
258
api/ordinals/src/api/util/helpers.ts
Normal file
@@ -0,0 +1,258 @@
import BigNumber from 'bignumber.js';
import {
  DbBrc20Activity,
  DbBrc20Balance,
  DbBrc20EventOperation,
  DbBrc20Holder,
  DbBrc20Token,
  DbBrc20TokenWithSupply,
} from '../../pg/brc20/types';
import {
  DbFullyLocatedInscriptionResult,
  DbInscriptionLocationChange,
  DbLocation,
} from '../../pg/types';
import {
  BlockHashParamCType,
  BlockHeightParamCType,
  BlockInscriptionTransfer,
  Brc20ActivityResponse,
  Brc20BalanceResponse,
  Brc20HolderResponse,
  Brc20Supply,
  Brc20TokenResponse,
  InscriptionLocationResponse,
  InscriptionResponseType,
} from '../schemas';

export const DEFAULT_API_LIMIT = 20;

function parseTimestamp(timestamp: number): number {
  return timestamp * 1000;
}

export function parseDbInscriptions(
  items: DbFullyLocatedInscriptionResult[]
): InscriptionResponseType[] {
  return items.map(i => ({
    id: i.genesis_id,
    number: parseInt(i.number),
    address: i.address,
    genesis_address: i.genesis_address,
    genesis_block_height: parseInt(i.genesis_block_height),
    genesis_block_hash: i.genesis_block_hash,
    genesis_tx_id: i.genesis_tx_id,
    genesis_fee: i.genesis_fee.toString(),
    genesis_timestamp: parseTimestamp(i.genesis_timestamp),
    tx_id: i.tx_id,
    location: `${i.output}:${i.offset}`,
    output: i.output,
    value: i.value,
    offset: i.offset,
    sat_ordinal: i.sat_ordinal.toString(),
    sat_rarity: i.sat_rarity,
    sat_coinbase_height: parseInt(i.sat_coinbase_height),
    mime_type: i.mime_type,
    content_type: i.content_type,
    content_length: parseInt(i.content_length),
    timestamp: parseTimestamp(i.timestamp),
    curse_type: i.curse_type,
    recursive: i.recursive,
    recursion_refs: i.recursion_refs?.split(',') ?? null,
    parent: i.parent,
    metadata: i.metadata ? JSON.parse(i.metadata) : null,
    delegate: i.delegate ?? null,
    meta_protocol: i.metaprotocol ?? null,
  }));
}
export function parseDbInscription(item: DbFullyLocatedInscriptionResult): InscriptionResponseType {
  return parseDbInscriptions([item])[0];
}

export function parseInscriptionLocations(items: DbLocation[]): InscriptionLocationResponse[] {
  return items.map(i => ({
    block_height: parseInt(i.block_height),
    block_hash: i.block_hash,
    address: i.address,
    tx_id: i.tx_id,
    location: `${i.output}:${i.offset}`,
    output: i.output,
    value: i.value,
    offset: i.offset,
    timestamp: parseTimestamp(i.timestamp),
  }));
}

export function parseBlockTransfers(
  items: DbInscriptionLocationChange[]
): BlockInscriptionTransfer[] {
  return items.map(i => ({
    id: i.genesis_id,
    number: parseInt(i.number),
    from: {
      block_height: parseInt(i.from_block_height),
      block_hash: i.from_block_hash,
      address: i.from_address,
      tx_id: i.from_tx_id,
      location: `${i.from_output}:${i.from_offset}`,
      output: i.from_output,
      value: i.from_value,
      offset: i.from_offset,
      timestamp: parseTimestamp(i.from_timestamp),
    },
    to: {
      block_height: parseInt(i.to_block_height),
      block_hash: i.to_block_hash,
      address: i.to_address,
      tx_id: i.to_tx_id,
      location: `${i.to_output}:${i.to_offset}`,
      output: i.to_output,
      value: i.to_value,
      offset: i.to_offset,
      timestamp: parseTimestamp(i.to_timestamp),
    },
  }));
}

export function parseBrc20Tokens(items: DbBrc20Token[]): Brc20TokenResponse[] {
  return items.map(i => ({
    id: i.inscription_id,
    number: parseInt(i.inscription_number),
    block_height: parseInt(i.block_height),
    tx_id: i.tx_id,
    address: i.address,
    ticker: i.ticker,
    max_supply: decimals(i.max, i.decimals),
    mint_limit: i.limit ? decimals(i.limit, i.decimals) : null,
    decimals: i.decimals,
    deploy_timestamp: parseTimestamp(i.timestamp),
    minted_supply: decimals(i.minted_supply, i.decimals),
    tx_count: parseInt(i.tx_count),
    self_mint: i.self_mint,
  }));
}

export function parseBrc20Supply(item: DbBrc20TokenWithSupply): Brc20Supply {
  return {
    max_supply: decimals(item.max, item.decimals),
    minted_supply: decimals(item.minted_supply, item.decimals),
    holders: parseInt(item.holders),
  };
}

export function parseBrc20Balances(items: DbBrc20Balance[]): Brc20BalanceResponse[] {
  return items.map(i => ({
    ticker: i.ticker,
    available_balance: decimals(i.avail_balance, i.decimals),
    transferrable_balance: decimals(i.trans_balance, i.decimals),
    overall_balance: decimals(i.total_balance, i.decimals),
  }));
}

export function parseBrc20Activities(items: DbBrc20Activity[]): Brc20ActivityResponse[] {
  return items.map(i => {
    const activity = {
      operation: i.operation,
      ticker: i.ticker,
      address: i.to_address ?? i.address,
      tx_id: i.tx_id,
      inscription_id: i.inscription_id,
      location: `${i.output}:${i.offset}`,
      block_hash: i.block_hash,
      block_height: parseInt(i.block_height),
      timestamp: parseTimestamp(i.timestamp),
    };
    switch (i.operation) {
      case DbBrc20EventOperation.deploy: {
        return {
          ...activity,
          deploy: {
            max_supply: decimals(i.deploy_max, i.deploy_decimals),
            mint_limit: i.deploy_limit ? decimals(i.deploy_limit, i.deploy_decimals) : null,
            decimals: i.deploy_decimals,
          },
        };
      }
      case DbBrc20EventOperation.mint: {
        return {
          ...activity,
          mint: {
            amount: decimals(i.amount, i.deploy_decimals),
          },
        };
      }
      case DbBrc20EventOperation.transfer: {
        return {
          ...activity,
          transfer: {
            amount: decimals(i.amount, i.deploy_decimals),
            from_address: i.address,
          },
        };
      }
      case DbBrc20EventOperation.transferSend: {
        return {
          ...activity,
          transfer_send: {
            amount: decimals(i.amount, i.deploy_decimals),
            from_address: i.address,
            to_address: i.to_address ?? i.address,
          },
        };
      }
    }
  });
}

export function parseBrc20Holders(items: DbBrc20Holder[]): Brc20HolderResponse[] {
  return items.map(i => ({
    address: i.address,
    overall_balance: decimals(i.total_balance, i.decimals),
  }));
}

export function parseSatPoint(satpoint: string): {
  tx_id: string;
  vout: string;
  offset?: string;
} {
  const [tx_id, vout, offset] = satpoint.split(':');
  return { tx_id: normalizedHexString(tx_id), vout: vout, offset };
}

function decimals(num: string, decimals: number): string {
  return new BigNumber(num).dividedBy(10 ** decimals).toFixed(decimals);
}

/**
 * Decodes a `0x` prefixed hex string to a buffer.
 * @param hex - A hex string with a `0x` prefix.
 */
export function hexToBuffer(hex: string): Buffer {
  if (hex.length === 0) {
    return Buffer.alloc(0);
  }
  if (!hex.startsWith('0x')) {
    throw new Error(`Hex string is missing the "0x" prefix: "${hex}"`);
  }
  if (hex.length % 2 !== 0) {
    throw new Error(`Hex string is an odd number of digits: ${hex}`);
  }
  return Buffer.from(hex.substring(2), 'hex');
}

const has0xPrefix = (id: string) => id.substring(0, 2).toLowerCase() === '0x';

export function normalizedHexString(hex: string): string {
  return has0xPrefix(hex) ? hex.substring(2) : hex;
}

export function blockParam(param: string | undefined, name: string) {
  const out: Record<string, string> = {};
  if (BlockHashParamCType.Check(param)) {
    out[`${name}_hash`] = param;
  } else if (BlockHeightParamCType.Check(param)) {
    out[`${name}_height`] = param;
  }
  return out;
}
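For instance, the decimals helper shifts a raw integer amount by the token's precision before it is serialized (worked examples, values illustrative):

decimals('2100000000', 2); // => '21000000.00'
decimals('1000000', 18);   // => '0.000000000001000000'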
107
api/ordinals/src/api/util/ordinal-satoshi.ts
Normal file
@@ -0,0 +1,107 @@
import BigNumber from 'bignumber.js';

const HALVING_BLOCKS = 210_000;
const DIFFICULTY_ADJUST_BLOCKS = 2016;
const INITIAL_SUBSIDY = 50;
const SATS_PER_BTC = 100_000_000;
export const SAT_SUPPLY = 2099999997690000;

export enum SatoshiRarity {
  common = 'common',
  uncommon = 'uncommon',
  rare = 'rare',
  epic = 'epic',
  legendary = 'legendary',
  mythic = 'mythic',
}

/**
 * Ordinal Satoshi calculator. Mostly translated from the original Rust implementation at
 * https://github.com/casey/ord/blob/master/src/sat.rs
 */
export class OrdinalSatoshi {
  public blockHeight: number;
  public cycle: number;
  public ordinal: number;
  public epoch: number;
  public period: number;
  public offset: number;
  private hour: number;
  private minute: number;
  private second: number;
  private third: number;

  constructor(ordinal: number) {
    if (ordinal > SAT_SUPPLY || ordinal < 0) throw Error('Invalid satoshi ordinal number');
    let satAccum = 0;
    let subsidy = INITIAL_SUBSIDY;
    let epoch = 0;
    while (true) {
      const satHalvingMax = HALVING_BLOCKS * subsidy * SATS_PER_BTC;
      if (satAccum + satHalvingMax > ordinal) {
        break;
      }
      satAccum += satHalvingMax;
      subsidy /= 2;
      epoch++;
    }
    const halvingOffset = ordinal - satAccum;
    const epochBoundary = epoch * HALVING_BLOCKS;
    const exactHeight = halvingOffset / (subsidy * SATS_PER_BTC) + epochBoundary;

    this.ordinal = ordinal;
    this.blockHeight = Math.floor(exactHeight);
    this.cycle = this.hour = Math.floor(epoch / 6);
    this.minute = epochBoundary === 0 ? this.blockHeight : this.blockHeight % epochBoundary;
    this.second = this.blockHeight % DIFFICULTY_ADJUST_BLOCKS;
    this.third = this.offset = Math.round(
      (exactHeight - this.blockHeight) * subsidy * Math.pow(10, 8)
    );
    this.epoch = epoch;
    this.period = Math.floor(this.blockHeight / DIFFICULTY_ADJUST_BLOCKS);
  }

  public get degree(): string {
    return `${this.hour}°${this.minute}′${this.second}″${this.third}‴`;
  }

  public get decimal(): string {
    return `${this.blockHeight}.${this.third}`;
  }

  public get name(): string {
    let x = SAT_SUPPLY - this.ordinal;
    const name: string[] = [];
    const alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('');
    while (x > 0) {
      const index = Math.floor((x - 1) % 26);
      name.push(alphabet[index]);
      x = Math.floor((x - 1) / 26); // integer division, as in the Rust original
    }
    return name.reverse().join('');
  }

  public get percentile(): string {
    const percentile = new BigNumber((this.ordinal / (SAT_SUPPLY - 1)) * 100.0);
    return `${percentile.toFixed()}%`;
  }

  public get rarity(): SatoshiRarity {
    if (this.hour === 0 && this.minute === 0 && this.second === 0 && this.third === 0) {
      return SatoshiRarity.mythic;
    }
    if (this.minute === 0 && this.second === 0 && this.third === 0) {
      return SatoshiRarity.legendary;
    }
    if (this.minute === 0 && this.third === 0) {
      return SatoshiRarity.epic;
    }
    if (this.second === 0 && this.third === 0) {
      return SatoshiRarity.rare;
    }
    if (this.third === 0) {
      return SatoshiRarity.uncommon;
    }
    return SatoshiRarity.common;
  }
}
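A short sketch of the calculator in use: sat 0 opens the genesis block, so every clock component is zero, while the first sat of block 1 only has a zero offset.

const genesisSat = new OrdinalSatoshi(0);
genesisSat.degree; // => '0°0′0″0‴'
genesisSat.rarity; // => SatoshiRarity.mythic

new OrdinalSatoshi(50 * 100_000_000).rarity; // first sat of block 1 => SatoshiRarity.uncommon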
37
api/ordinals/src/env.ts
Normal file
@@ -0,0 +1,37 @@
import { Static, Type } from '@sinclair/typebox';
import envSchema from 'env-schema';

const schema = Type.Object({
  /** Hostname of the API server */
  API_HOST: Type.String({ default: '0.0.0.0' }),
  /** Port on which to serve the API */
  API_PORT: Type.Number({ default: 3000, minimum: 0, maximum: 65535 }),
  /** Port on which to serve the profiler */
  PROFILER_PORT: Type.Number({ default: 9119 }),

  ORDINALS_PGHOST: Type.String(),
  ORDINALS_PGPORT: Type.Number({ default: 5432, minimum: 0, maximum: 65535 }),
  ORDINALS_PGUSER: Type.String(),
  ORDINALS_PGPASSWORD: Type.String(),
  ORDINALS_PGDATABASE: Type.String(),
  ORDINALS_SCHEMA: Type.Optional(Type.String()),

  BRC20_PGHOST: Type.String(),
  BRC20_PGPORT: Type.Number({ default: 5432, minimum: 0, maximum: 65535 }),
  BRC20_PGUSER: Type.String(),
  BRC20_PGPASSWORD: Type.String(),
  BRC20_PGDATABASE: Type.String(),
  BRC20_SCHEMA: Type.Optional(Type.String()),

  /** Limit to how many concurrent connections can be created */
  PG_CONNECTION_POOL_MAX: Type.Number({ default: 10 }),
  PG_IDLE_TIMEOUT: Type.Number({ default: 30 }),
  PG_MAX_LIFETIME: Type.Number({ default: 60 }),
  PG_STATEMENT_TIMEOUT: Type.Number({ default: 60_000 }),
});
type Env = Static<typeof schema>;

export const ENV = envSchema<Env>({
  schema: schema,
  dotenv: true,
});
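The variables without defaults must be supplied at runtime (env-schema throws otherwise, since dotenv is enabled a local .env works too). A sketch of such a file, with every host and credential a placeholder:

ORDINALS_PGHOST=localhost
ORDINALS_PGUSER=postgres
ORDINALS_PGPASSWORD=postgres
ORDINALS_PGDATABASE=ordinals
BRC20_PGHOST=localhost
BRC20_PGUSER=postgres
BRC20_PGPASSWORD=postgres
BRC20_PGDATABASE=brc20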
74
api/ordinals/src/index.ts
Normal file
@@ -0,0 +1,74 @@
import {
  buildProfilerServer,
  isProdEnv,
  logger,
  registerShutdownConfig,
} from '@hirosystems/api-toolkit';
import { buildApiServer, buildPromServer } from './api/init';
import { ENV } from './env';
import { ApiMetrics } from './metrics/metrics';
import { PgStore } from './pg/pg-store';
import { Brc20PgStore } from './pg/brc20/brc20-pg-store';

async function initApiService(db: PgStore, brc20Db: Brc20PgStore) {
  logger.info('Initializing API service...');
  const fastify = await buildApiServer({ db, brc20Db });
  registerShutdownConfig({
    name: 'API Server',
    forceKillable: false,
    handler: async () => {
      await fastify.close();
    },
  });

  await fastify.listen({ host: ENV.API_HOST, port: ENV.API_PORT });

  if (isProdEnv) {
    const promServer = await buildPromServer({ metrics: fastify.metrics });
    registerShutdownConfig({
      name: 'Prometheus Server',
      forceKillable: false,
      handler: async () => {
        await promServer.close();
      },
    });

    ApiMetrics.configure(db);
    await promServer.listen({ host: ENV.API_HOST, port: 9153 });

    const profilerServer = await buildProfilerServer();
    registerShutdownConfig({
      name: 'Profiler Server',
      forceKillable: false,
      handler: async () => {
        await profilerServer.close();
      },
    });
    await profilerServer.listen({ host: ENV.API_HOST, port: ENV.PROFILER_PORT });
  }
}

async function initApp() {
  logger.info(`Initializing Ordinals API...`);
  const db = await PgStore.connect();
  const brc20Db = await Brc20PgStore.connect();
  await initApiService(db, brc20Db);
  registerShutdownConfig({
    name: 'DB',
    forceKillable: false,
    handler: async () => {
      await db.close();
      await brc20Db.close();
    },
  });
}

registerShutdownConfig();
initApp()
  .then(() => {
    logger.info('App initialized');
  })
  .catch(error => {
    logger.error(error, `App failed to start`);
    process.exit(1);
  });
42
api/ordinals/src/metrics/metrics.ts
Normal file
@@ -0,0 +1,42 @@
import * as prom from 'prom-client';
import { PgStore } from '../pg/pg-store';

export class ApiMetrics {
  /** The most recent Bitcoin block height ingested by the API */
  readonly ordinals_api_block_height: prom.Gauge;
  /** Maximum blessed inscription number */
  readonly ordinals_api_max_inscription_number: prom.Gauge;
  /** Maximum cursed inscription number */
  readonly ordinals_api_max_cursed_inscription_number: prom.Gauge;

  static configure(db: PgStore): ApiMetrics {
    return new ApiMetrics(db);
  }

  private constructor(db: PgStore) {
    this.ordinals_api_block_height = new prom.Gauge({
      name: `ordinals_api_block_height`,
      help: 'The most recent Bitcoin block height ingested by the API',
      async collect() {
        const height = await db.getChainTipBlockHeight();
        this.set(height);
      },
    });
    this.ordinals_api_max_inscription_number = new prom.Gauge({
      name: `ordinals_api_max_inscription_number`,
      help: 'Maximum blessed inscription number',
      async collect() {
        const max = await db.getMaxInscriptionNumber();
        if (max) this.set(max);
      },
    });
    this.ordinals_api_max_cursed_inscription_number = new prom.Gauge({
      name: `ordinals_api_max_cursed_inscription_number`,
      help: 'Maximum cursed inscription number',
      async collect() {
        const max = await db.getMaxCursedInscriptionNumber();
        if (max) this.set(max);
      },
    });
  }
}
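With prom-client, a gauge that defines an async collect() is refreshed lazily at scrape time, so the database is only queried when the registry is rendered. A sketch:

import * as prom from 'prom-client';

// Rendering the default registry invokes each gauge's collect() before serializing.
prom.register.metrics().then(text => console.log(text));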
255
api/ordinals/src/pg/brc20/brc20-pg-store.ts
Normal file
@@ -0,0 +1,255 @@
import { BasePgStore, connectPostgres, PgConnectionVars } from '@hirosystems/api-toolkit';
import { DbInscriptionIndexPaging, DbPaginatedResult } from '../types';
import {
  DbBrc20Activity,
  DbBrc20Balance,
  DbBrc20Holder,
  DbBrc20Token,
  DbBrc20TokenWithSupply,
} from './types';
import { Brc20TokenOrderBy } from '../../api/schemas';
import { objRemoveUndefinedValues } from '../helpers';
import { sqlOr } from './helpers';
import { ENV } from '../../env';

export class Brc20PgStore extends BasePgStore {
  static async connect(): Promise<Brc20PgStore> {
    const pgConfig: PgConnectionVars = {
      host: ENV.BRC20_PGHOST,
      port: ENV.BRC20_PGPORT,
      user: ENV.BRC20_PGUSER,
      password: ENV.BRC20_PGPASSWORD,
      database: ENV.BRC20_PGDATABASE,
      schema: ENV.BRC20_SCHEMA,
    };
    const sql = await connectPostgres({
      usageName: 'brc20-pg-store',
      connectionArgs: pgConfig,
      connectionConfig: {
        poolMax: ENV.PG_CONNECTION_POOL_MAX,
        idleTimeout: ENV.PG_IDLE_TIMEOUT,
        maxLifetime: ENV.PG_MAX_LIFETIME,
        statementTimeout: ENV.PG_STATEMENT_TIMEOUT,
      },
    });
    return new Brc20PgStore(sql);
  }

  async getTokens(
    args: { ticker?: string[]; order_by?: Brc20TokenOrderBy } & DbInscriptionIndexPaging
  ): Promise<DbPaginatedResult<DbBrc20Token>> {
    const tickerPrefixCondition = sqlOr(
      this.sql,
      args.ticker?.map(t => this.sql`d.ticker LIKE LOWER(${t}) || '%'`)
    );
    const orderBy =
      args.order_by === Brc20TokenOrderBy.tx_count
        ? this.sql`d.tx_count DESC` // tx_count
        : this.sql`d.block_height DESC, d.tx_index DESC`; // default: `index`
    const results = await this.sql<(DbBrc20Token & { total: number })[]>`
      ${
        args.ticker === undefined
          ? this.sql`WITH global_count AS (
              SELECT COALESCE(count, 0) AS count
              FROM counts_by_operation
              WHERE operation = 'deploy'
            )`
          : this.sql``
      }
      SELECT
        d.*,
        ${
          args.ticker ? this.sql`COUNT(*) OVER()` : this.sql`(SELECT count FROM global_count)`
        } AS total
      FROM tokens AS d
      ${tickerPrefixCondition ? this.sql`WHERE ${tickerPrefixCondition}` : this.sql``}
      ORDER BY ${orderBy}
      OFFSET ${args.offset}
      LIMIT ${args.limit}
    `;
    return {
      total: results[0]?.total ?? 0,
      results: results ?? [],
    };
  }

  async getBalances(
    args: {
      address: string;
      ticker?: string[];
      block_height?: number;
    } & DbInscriptionIndexPaging
  ): Promise<DbPaginatedResult<DbBrc20Balance>> {
    const ticker = sqlOr(
      this.sql,
      args.ticker?.map(t => this.sql`d.ticker LIKE LOWER(${t}) || '%'`)
    );
    // Change the selection table depending on whether we're filtering by block height or not.
    const results = await this.sql<(DbBrc20Balance & { total: number })[]>`
      ${
        args.block_height
          ? this.sql`
            SELECT
              d.ticker, d.decimals,
              SUM(b.avail_balance) AS avail_balance,
              SUM(b.trans_balance) AS trans_balance,
              SUM(b.avail_balance + b.trans_balance) AS total_balance,
              COUNT(*) OVER() as total
            FROM operations AS b
            INNER JOIN tokens AS d ON d.ticker = b.ticker
            WHERE
              b.address = ${args.address}
              AND b.block_height <= ${args.block_height}
              ${ticker ? this.sql`AND ${ticker}` : this.sql``}
            GROUP BY d.ticker, d.decimals
            HAVING SUM(b.avail_balance + b.trans_balance) > 0
          `
          : this.sql`
            SELECT d.ticker, d.decimals, b.avail_balance, b.trans_balance, b.total_balance, COUNT(*) OVER() as total
            FROM balances AS b
            INNER JOIN tokens AS d ON d.ticker = b.ticker
            WHERE
              b.total_balance > 0
              AND b.address = ${args.address}
              ${ticker ? this.sql`AND ${ticker}` : this.sql``}
          `
      }
      LIMIT ${args.limit}
      OFFSET ${args.offset}
    `;
    return {
      total: results[0]?.total ?? 0,
      results: results ?? [],
    };
  }

  async getToken(args: { ticker: string }): Promise<DbBrc20TokenWithSupply | undefined> {
    const result = await this.sql<DbBrc20TokenWithSupply[]>`
      WITH token AS (
        SELECT d.*
        FROM tokens AS d
        WHERE d.ticker = LOWER(${args.ticker})
      ),
      holders AS (
        SELECT COUNT(*) AS count
        FROM balances
        WHERE ticker = (SELECT ticker FROM token) AND total_balance > 0
      )
      SELECT *, COALESCE((SELECT count FROM holders), 0) AS holders
      FROM token
    `;
    if (result.count) return result[0];
  }

  async getTokenHolders(
    args: {
      ticker: string;
    } & DbInscriptionIndexPaging
  ): Promise<DbPaginatedResult<DbBrc20Holder> | undefined> {
    return await this.sqlTransaction(async sql => {
      const token = await sql<{ id: string; decimals: number }[]>`
        SELECT ticker FROM tokens WHERE ticker = LOWER(${args.ticker})
      `;
      if (token.count === 0) return;
      const results = await sql<(DbBrc20Holder & { total: number })[]>`
        SELECT
          b.address, d.decimals, b.total_balance, COUNT(*) OVER() AS total
        FROM balances AS b
        INNER JOIN tokens AS d USING (ticker)
        WHERE b.ticker = LOWER(${args.ticker})
        ORDER BY b.total_balance DESC
        LIMIT ${args.limit}
        OFFSET ${args.offset}
      `;
      return {
        total: results[0]?.total ?? 0,
        results: results ?? [],
      };
    });
  }

  async getActivity(
    page: DbInscriptionIndexPaging,
    filters: {
      ticker?: string[];
      block_height?: number;
      operation?: string[];
      address?: string;
    }
  ): Promise<DbPaginatedResult<DbBrc20Activity>> {
    // Do we need a specific result count such as total activity or activity per address?
    objRemoveUndefinedValues(filters);
    const filterLength = Object.keys(filters).length;
    const needsGlobalEventCount =
      filterLength === 0 ||
      (filterLength === 1 && filters.operation && filters.operation.length > 0);
    const needsAddressEventCount =
      (filterLength === 1 && filters.address != undefined && filters.address != '') ||
      (filterLength === 2 &&
        filters.operation &&
        filters.operation.length > 0 &&
        filters.address != undefined &&
        filters.address != '');
    const needsTickerCount = filterLength === 1 && filters.ticker && filters.ticker.length > 0;
    const operationsFilter = filters.operation?.filter(i => i !== 'transfer_receive');

    return this.sqlTransaction(async sql => {
      const results = await sql<(DbBrc20Activity & { total: number })[]>`
        WITH event_count AS (${
          needsGlobalEventCount
            ? sql`
              SELECT COALESCE(SUM(count), 0) AS count
              FROM counts_by_operation
              ${operationsFilter ? sql`WHERE operation IN ${sql(operationsFilter)}` : sql``}
            `
            : needsAddressEventCount
            ? sql`
              SELECT SUM(count) AS count
              FROM counts_by_address_operation
              WHERE address = ${filters.address}
              ${operationsFilter ? sql`AND operation IN ${sql(operationsFilter)}` : sql``}
            `
            : needsTickerCount && filters.ticker !== undefined
            ? sql`
              SELECT COALESCE(SUM(tx_count), 0) AS count
              FROM tokens AS d
              WHERE ticker IN ${sql(filters.ticker)}
            `
            : sql`SELECT NULL AS count`
        })
        SELECT
          e.*,
          d.max AS deploy_max,
          d.limit AS deploy_limit,
          d.decimals AS deploy_decimals,
          ${
            needsGlobalEventCount || needsAddressEventCount || needsTickerCount
              ? sql`(SELECT count FROM event_count)`
              : sql`COUNT(*) OVER()`
          } AS total
        FROM operations AS e
        INNER JOIN tokens AS d ON d.ticker = e.ticker
        WHERE TRUE
          ${
            operationsFilter
              ? sql`AND e.operation IN ${sql(operationsFilter)}`
              : sql`AND e.operation <> 'transfer_receive'`
          }
          ${filters.ticker ? sql`AND e.ticker IN ${sql(filters.ticker)}` : sql``}
          ${filters.block_height ? sql`AND e.block_height = ${filters.block_height}` : sql``}
          ${
            filters.address
              ? sql`AND (e.address = ${filters.address} OR e.to_address = ${filters.address})`
              : sql``
          }
        ORDER BY e.block_height DESC, e.tx_index DESC
        LIMIT ${page.limit}
        OFFSET ${page.offset}
      `;
      return {
        total: results[0]?.total ?? 0,
        results: results ?? [],
      };
    });
  }
}
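A sketch of querying the store (argument values illustrative):

const brc20Db = await Brc20PgStore.connect();
const page = await brc20Db.getTokens({
  ticker: ['PEPE'], // case-insensitive prefix match
  order_by: Brc20TokenOrderBy.tx_count,
  limit: 20,
  offset: 0,
});
// page.total carries the pre-aggregated or window count; page.results the current page.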
9
api/ordinals/src/pg/brc20/helpers.ts
Normal file
@@ -0,0 +1,9 @@
import * as postgres from 'postgres';
import { PgSqlClient } from '@hirosystems/api-toolkit';

export function sqlOr(
  sql: PgSqlClient,
  partials: postgres.PendingQuery<postgres.Row[]>[] | undefined
) {
  return partials?.reduce((acc, curr) => sql`${acc} OR ${curr}`);
}
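For example, given a connected PgSqlClient named sql, two ticker fragments reduce to a single OR'ed condition; an undefined partials array short-circuits to undefined, which callers use to omit the WHERE clause entirely:

const condition = sqlOr(
  sql,
  ['pepe', 'ordi'].map(t => sql`d.ticker LIKE LOWER(${t}) || '%'`)
);
// => d.ticker LIKE LOWER('pepe') || '%' OR d.ticker LIKE LOWER('ordi') || '%'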
65
api/ordinals/src/pg/brc20/types.ts
Normal file
@@ -0,0 +1,65 @@
export type DbBrc20Token = {
  ticker: string;
  display_ticker: string;
  inscription_id: string;
  inscription_number: string;
  block_height: string;
  block_hash: string;
  tx_id: string;
  tx_index: number;
  address: string;
  max: string;
  limit: string;
  decimals: number;
  self_mint: boolean;
  minted_supply: string;
  tx_count: string;
  timestamp: number;
};

export type DbBrc20TokenWithSupply = DbBrc20Token & {
  minted_supply: string;
  holders: string;
};

export type DbBrc20Holder = {
  address: string;
  total_balance: string;
  decimals: number;
};

export type DbBrc20Balance = {
  ticker: string;
  decimals: number;
  avail_balance: string;
  trans_balance: string;
  total_balance: string;
};

export enum DbBrc20EventOperation {
  deploy = 'deploy',
  mint = 'mint',
  transfer = 'transfer',
  transferSend = 'transfer_send',
}

export type DbBrc20Activity = {
  ticker: string;
  operation: DbBrc20EventOperation;
  inscription_id: string;
  inscription_number: string;
  ordinal_number: string;
  block_height: string;
  block_hash: string;
  tx_id: string;
  tx_index: number;
  output: string;
  offset: string;
  timestamp: number;
  amount: string;
  address: string;
  to_address: string | null;
  deploy_decimals: number;
  deploy_max: string;
  deploy_limit: string | null;
};
163
api/ordinals/src/pg/counts/counts-pg-store.ts
Normal file
@@ -0,0 +1,163 @@
import { BasePgStoreModule, PgSqlClient } from '@hirosystems/api-toolkit';
import { SatoshiRarity } from '../../api/util/ordinal-satoshi';
import {
  DbInscriptionCountPerBlock,
  DbInscriptionCountPerBlockFilters,
  DbInscriptionIndexFilters,
  DbInscriptionType,
} from '../types';
import { DbInscriptionIndexResultCountType } from './types';

/**
 * This class affects all the different tables that track inscription counts according to different
 * parameters (sat rarity, mime type, cursed, blessed, current owner, etc.)
 */
export class CountsPgStore extends BasePgStoreModule {
  async fromResults(
    countType: DbInscriptionIndexResultCountType,
    filters?: DbInscriptionIndexFilters
  ): Promise<number | undefined> {
    switch (countType) {
      case DbInscriptionIndexResultCountType.all:
        return await this.getInscriptionCount();
      case DbInscriptionIndexResultCountType.cursed:
        return await this.getInscriptionCount(
          filters?.cursed === true ? DbInscriptionType.cursed : DbInscriptionType.blessed
        );
      case DbInscriptionIndexResultCountType.mimeType:
        return await this.getMimeTypeCount(filters?.mime_type);
      case DbInscriptionIndexResultCountType.satRarity:
        return await this.getSatRarityCount(filters?.sat_rarity);
      case DbInscriptionIndexResultCountType.address:
        return await this.getAddressCount(filters?.address);
      case DbInscriptionIndexResultCountType.recursive:
        return await this.getRecursiveCount(filters?.recursive);
      case DbInscriptionIndexResultCountType.genesisAddress:
        return await this.getGenesisAddressCount(filters?.genesis_address);
      case DbInscriptionIndexResultCountType.blockHeight:
        return await this.getBlockCount(
          filters?.genesis_block_height,
          filters?.genesis_block_height
        );
      case DbInscriptionIndexResultCountType.fromblockHeight:
        return await this.getBlockCount(filters?.from_genesis_block_height);
      case DbInscriptionIndexResultCountType.toblockHeight:
        return await this.getBlockCount(undefined, filters?.to_genesis_block_height);
      case DbInscriptionIndexResultCountType.blockHeightRange:
        return await this.getBlockCount(
          filters?.from_genesis_block_height,
          filters?.to_genesis_block_height
        );
      case DbInscriptionIndexResultCountType.blockHash:
        return await this.getBlockHashCount(filters?.genesis_block_hash);
    }
  }

  async getInscriptionCountPerBlock(
    filters: DbInscriptionCountPerBlockFilters
  ): Promise<DbInscriptionCountPerBlock[]> {
    const fromCondition = filters.from_block_height
      ? this.sql`block_height >= ${filters.from_block_height}`
      : this.sql``;

    const toCondition = filters.to_block_height
      ? this.sql`block_height <= ${filters.to_block_height}`
      : this.sql``;

    const where =
      filters.from_block_height && filters.to_block_height
        ? this.sql`WHERE ${fromCondition} AND ${toCondition}`
        : this.sql`WHERE ${fromCondition}${toCondition}`;

    return await this.sql<DbInscriptionCountPerBlock[]>`
      SELECT *
      FROM counts_by_block
      ${filters.from_block_height || filters.to_block_height ? where : this.sql``}
      ORDER BY block_height DESC
      LIMIT 5000
    `; // roughly 35 days of blocks, assuming 10 minute block times on a full database
  }

  private async getBlockCount(from?: number, to?: number): Promise<number> {
    if (from === undefined && to === undefined) return 0;
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(inscription_count), 0)::int AS count
      FROM counts_by_block
      WHERE TRUE
        ${from !== undefined ? this.sql`AND block_height >= ${from}` : this.sql``}
        ${to !== undefined ? this.sql`AND block_height <= ${to}` : this.sql``}
    `;
    return result[0].count;
  }

  private async getBlockHashCount(hash?: string): Promise<number> {
    if (!hash) return 0;
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(inscription_count), 0)::int AS count
      FROM counts_by_block
      WHERE block_hash = ${hash}
    `;
    return result[0].count;
  }

  private async getInscriptionCount(type?: DbInscriptionType): Promise<number> {
    const types =
      type !== undefined ? [type] : [DbInscriptionType.blessed, DbInscriptionType.cursed];
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(count), 0)::int AS count
      FROM counts_by_type
      WHERE type IN ${this.sql(types)}
    `;
    return result[0].count;
  }

  private async getMimeTypeCount(mimeType?: string[]): Promise<number> {
    if (!mimeType) return 0;
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(count), 0)::int AS count
      FROM counts_by_mime_type
      WHERE mime_type IN ${this.sql(mimeType)}
    `;
    return result[0].count;
  }

  private async getSatRarityCount(satRarity?: SatoshiRarity[]): Promise<number> {
    if (!satRarity) return 0;
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(count), 0)::int AS count
      FROM counts_by_sat_rarity
      WHERE rarity IN ${this.sql(satRarity)}
    `;
    return result[0].count;
  }

  private async getRecursiveCount(recursive?: boolean): Promise<number> {
    const rec = recursive !== undefined ? [recursive] : [true, false];
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(count), 0)::int AS count
      FROM counts_by_recursive
      WHERE recursive IN ${this.sql(rec)}
    `;
    return result[0].count;
  }

  async getAddressCount(address?: string[]): Promise<number> {
    if (!address) return 0;
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(count), 0)::int AS count
      FROM counts_by_address
      WHERE address IN ${this.sql(address)}
    `;
    return result[0].count;
  }

  private async getGenesisAddressCount(genesisAddress?: string[]): Promise<number> {
    if (!genesisAddress) return 0;
    const result = await this.sql<{ count: number }[]>`
      SELECT COALESCE(SUM(count), 0)::int AS count
      FROM counts_by_genesis_address
      WHERE address IN ${this.sql(genesisAddress)}
    `;
    return result[0].count;
  }
}
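A sketch of how the index endpoint consumes this module (see getInscriptions in pg-store.ts below): the count type selects a pre-aggregated table, and an undefined result means the caller must fall back to COUNT(*) OVER():

const countType = getIndexResultCountType({ sat_rarity: [SatoshiRarity.epic] });
const total = await db.counts.fromResults(countType, { sat_rarity: [SatoshiRarity.epic] });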
38
api/ordinals/src/pg/counts/helpers.ts
Normal file
@@ -0,0 +1,38 @@
import { objRemoveUndefinedValues } from '../helpers';
import { DbInscriptionIndexFilters } from '../types';
import { DbInscriptionIndexResultCountType } from './types';

/**
 * Returns which inscription count is required based on filters sent to the index endpoint.
 * @param filters - DbInscriptionIndexFilters
 * @returns DbInscriptionIndexResultCountType
 */
export function getIndexResultCountType(
  filters?: DbInscriptionIndexFilters
): DbInscriptionIndexResultCountType {
  if (!filters) return DbInscriptionIndexResultCountType.all;
  // How many filters do we have?
  objRemoveUndefinedValues(filters);
  switch (Object.keys(filters).length) {
    case 0:
      return DbInscriptionIndexResultCountType.all;
    case 1:
      if (filters.mime_type) return DbInscriptionIndexResultCountType.mimeType;
      if (filters.sat_rarity) return DbInscriptionIndexResultCountType.satRarity;
      if (filters.address) return DbInscriptionIndexResultCountType.address;
      if (filters.genesis_address) return DbInscriptionIndexResultCountType.genesisAddress;
      if (filters.genesis_block_height) return DbInscriptionIndexResultCountType.blockHeight;
      if (filters.from_genesis_block_height)
        return DbInscriptionIndexResultCountType.fromblockHeight;
      if (filters.to_genesis_block_height) return DbInscriptionIndexResultCountType.toblockHeight;
      if (filters.genesis_block_hash) return DbInscriptionIndexResultCountType.blockHash;
      if (filters.cursed !== undefined) return DbInscriptionIndexResultCountType.cursed;
      if (filters.recursive !== undefined) return DbInscriptionIndexResultCountType.recursive;
      if (filters.number || filters.genesis_id || filters.output || filters.sat_ordinal)
        return DbInscriptionIndexResultCountType.singleResult;
    case 2:
      if (filters.from_genesis_block_height && filters.to_genesis_block_height)
        return DbInscriptionIndexResultCountType.blockHeightRange;
  }
  return DbInscriptionIndexResultCountType.custom;
}
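Worked examples: a lone mime-type filter maps to its dedicated count table, while a combination with no cached aggregate falls through to custom:

getIndexResultCountType({ mime_type: ['image/png'] });
// => DbInscriptionIndexResultCountType.mimeType
getIndexResultCountType({ address: ['bc1p...'], cursed: true });
// => DbInscriptionIndexResultCountType.custom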
27
api/ordinals/src/pg/counts/types.ts
Normal file
@@ -0,0 +1,27 @@
/** Type of row count required for an inscription index endpoint call */
export enum DbInscriptionIndexResultCountType {
  /** All inscriptions */
  all,
  /** Filtered by cursed or blessed */
  cursed,
  /** Filtered by mime type */
  mimeType,
  /** Filtered by sat rarity */
  satRarity,
  /** Filtered by address */
  address,
  genesisAddress,
  /** Filtered by block height */
  blockHeight,
  fromblockHeight,
  toblockHeight,
  blockHeightRange,
  /** Filtered by block hash */
  blockHash,
  /** Filtered by recursive */
  recursive,
  /** Filtered by some other param that yields a single result (easy to count) */
  singleResult,
  /** Filtered by custom arguments (tough to count) */
  custom,
}
3
api/ordinals/src/pg/helpers.ts
Normal file
@@ -0,0 +1,3 @@
export function objRemoveUndefinedValues(obj: object) {
  Object.keys(obj).forEach(key => (obj as any)[key] === undefined && delete (obj as any)[key]);
}
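This mutates the object in place so that Object.keys() reflects only the filters actually set, which the count helpers above depend on:

const filters = { mime_type: ['image/png'], address: undefined };
objRemoveUndefinedValues(filters);
Object.keys(filters); // => ['mime_type']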
395
api/ordinals/src/pg/pg-store.ts
Normal file
@@ -0,0 +1,395 @@
import {
  BasePgStore,
  PgConnectionVars,
  PgSqlClient,
  connectPostgres,
} from '@hirosystems/api-toolkit';
import { Order, OrderBy } from '../api/schemas';
import { ENV } from '../env';
import { CountsPgStore } from './counts/counts-pg-store';
import { getIndexResultCountType } from './counts/helpers';
import {
  DbFullyLocatedInscriptionResult,
  DbInscriptionContent,
  DbInscriptionIndexFilters,
  DbInscriptionIndexOrder,
  DbInscriptionIndexPaging,
  DbInscriptionLocationChange,
  DbLocation,
  DbPaginatedResult,
} from './types';

type InscriptionIdentifier = { genesis_id: string } | { number: number };

export class PgStore extends BasePgStore {
  readonly counts: CountsPgStore;

  static async connect(): Promise<PgStore> {
    const pgConfig: PgConnectionVars = {
      host: ENV.ORDINALS_PGHOST,
      port: ENV.ORDINALS_PGPORT,
      user: ENV.ORDINALS_PGUSER,
      password: ENV.ORDINALS_PGPASSWORD,
      database: ENV.ORDINALS_PGDATABASE,
      schema: ENV.ORDINALS_SCHEMA,
    };
    const sql = await connectPostgres({
      usageName: 'ordinals-pg-store',
      connectionArgs: pgConfig,
      connectionConfig: {
        poolMax: ENV.PG_CONNECTION_POOL_MAX,
        idleTimeout: ENV.PG_IDLE_TIMEOUT,
        maxLifetime: ENV.PG_MAX_LIFETIME,
        statementTimeout: ENV.PG_STATEMENT_TIMEOUT,
      },
    });
    return new PgStore(sql);
  }

  constructor(sql: PgSqlClient) {
    super(sql);
    this.counts = new CountsPgStore(this);
  }

  async getChainTipBlockHeight(): Promise<number> {
    const result = await this.sql<{ block_height: string }[]>`SELECT block_height FROM chain_tip`;
    return parseInt(result[0].block_height);
  }

  async getMaxInscriptionNumber(): Promise<number | undefined> {
    const result = await this.sql<{ max: string }[]>`
      SELECT MAX(number) FROM inscriptions WHERE number >= 0
    `;
    if (result[0].max) {
      return parseInt(result[0].max);
    }
  }

  async getMaxCursedInscriptionNumber(): Promise<number | undefined> {
    const result = await this.sql<{ min: string }[]>`
      SELECT MIN(number) FROM inscriptions WHERE number < 0
    `;
    if (result[0].min) {
      return parseInt(result[0].min);
    }
  }

  async getInscriptionsIndexETag(): Promise<string> {
    const result = await this.sql<{ etag: string }[]>`
      SELECT MAX(timestamp)::text AS etag FROM locations
    `;
    return result[0].etag;
  }

  async getInscriptionsPerBlockETag(): Promise<string> {
    const result = await this.sql<{ block_hash: string; inscription_count: string }[]>`
      SELECT block_hash, inscription_count
      FROM counts_by_block
      ORDER BY block_height DESC
      LIMIT 1
    `;
    return `${result[0].block_hash}:${result[0].inscription_count}`;
  }

  async getInscriptionContent(
    args: InscriptionIdentifier
  ): Promise<DbInscriptionContent | undefined> {
    const result = await this.sql<DbInscriptionContent[]>`
      WITH content_id AS (
        SELECT
          CASE
            WHEN delegate IS NOT NULL THEN delegate
            ELSE inscription_id
          END AS genesis_id
        FROM inscriptions
        WHERE ${
          'genesis_id' in args
            ? this.sql`inscription_id = ${args.genesis_id}`
            : this.sql`number = ${args.number}`
        }
      )
      SELECT content, content_type, content_length
      FROM inscriptions
      WHERE inscription_id = (SELECT genesis_id FROM content_id)
    `;
    if (result.count > 0) {
      return result[0];
    }
  }

  async getInscriptionETag(args: InscriptionIdentifier): Promise<string | undefined> {
    const result = await this.sql<{ etag: string }[]>`
      SELECT l.timestamp::text AS etag
      FROM inscriptions AS i
      INNER JOIN current_locations AS c ON i.ordinal_number = c.ordinal_number
      INNER JOIN locations AS l ON
        l.ordinal_number = c.ordinal_number AND
        l.block_height = c.block_height AND
        l.tx_index = c.tx_index
      WHERE ${
        'genesis_id' in args
          ? this.sql`i.inscription_id = ${args.genesis_id}`
          : this.sql`i.number = ${args.number}`
      }
    `;
    if (result.count > 0) {
      return result[0].etag;
    }
  }

  async getInscriptions(
    page: DbInscriptionIndexPaging,
    filters?: DbInscriptionIndexFilters,
    sort?: DbInscriptionIndexOrder
  ): Promise<DbPaginatedResult<DbFullyLocatedInscriptionResult>> {
    return await this.sqlTransaction(async sql => {
      const order = sort?.order === Order.asc ? sql`ASC` : sql`DESC`;
      let orderBy = sql`i.number ${order}`;
      switch (sort?.order_by) {
        case OrderBy.genesis_block_height:
          orderBy = sql`i.block_height ${order}, i.tx_index ${order}`;
          break;
        case OrderBy.ordinal:
          orderBy = sql`i.ordinal_number ${order}`;
          break;
        case OrderBy.rarity:
          orderBy = sql`ARRAY_POSITION(ARRAY['common','uncommon','rare','epic','legendary','mythic'], s.rarity) ${order}, i.number DESC`;
          break;
      }
      // Do we need a filtered `COUNT(*)`? If so, try to use the pre-calculated counts we have in
      // cached tables to speed up these queries.
      const countType = getIndexResultCountType(filters);
      const total = await this.counts.fromResults(countType, filters);
      const results = await sql<(DbFullyLocatedInscriptionResult & { total: number })[]>`
        WITH results AS (
          SELECT
            i.inscription_id AS genesis_id,
            i.number,
            i.mime_type,
            i.content_type,
            i.content_length,
            i.fee AS genesis_fee,
            i.curse_type,
            i.ordinal_number AS sat_ordinal,
            i.parent,
            i.metadata,
            s.rarity AS sat_rarity,
            s.coinbase_height AS sat_coinbase_height,
            i.recursive,
            (
              SELECT STRING_AGG(ir.ref_inscription_id, ',')
              FROM inscription_recursions AS ir
              WHERE ir.inscription_id = i.inscription_id
            ) AS recursion_refs,
            i.block_height AS genesis_block_height,
            i.tx_index AS genesis_tx_index,
            i.timestamp AS genesis_timestamp,
            i.address AS genesis_address,
            cur.address,
            cur.tx_index,
            cur.block_height,
            ${total === undefined ? sql`COUNT(*) OVER() AS total` : sql`0 AS total`},
            ROW_NUMBER() OVER(ORDER BY ${orderBy}) AS row_num
          FROM inscriptions AS i
          INNER JOIN current_locations AS cur ON cur.ordinal_number = i.ordinal_number
          INNER JOIN satoshis AS s ON s.ordinal_number = i.ordinal_number
          WHERE TRUE
            ${
              filters?.genesis_id?.length
                ? sql`AND i.inscription_id IN ${sql(filters.genesis_id)}`
                : sql``
            }
            ${
              filters?.genesis_block_height
                ? sql`AND i.block_height = ${filters.genesis_block_height}`
                : sql``
            }
            ${
              filters?.genesis_block_hash
                ? sql`AND i.block_hash = ${filters.genesis_block_hash}`
                : sql``
            }
            ${
              filters?.from_genesis_block_height
                ? sql`AND i.block_height >= ${filters.from_genesis_block_height}`
                : sql``
            }
            ${
              filters?.to_genesis_block_height
                ? sql`AND i.block_height <= ${filters.to_genesis_block_height}`
                : sql``
            }
            ${
              filters?.from_sat_coinbase_height
                ? sql`AND s.coinbase_height >= ${filters.from_sat_coinbase_height}`
                : sql``
            }
            ${
              filters?.to_sat_coinbase_height
                ? sql`AND s.coinbase_height <= ${filters.to_sat_coinbase_height}`
                : sql``
            }
            ${
              filters?.from_genesis_timestamp
                ? sql`AND i.timestamp >= ${filters.from_genesis_timestamp}::bigint / 1000`
                : sql``
            }
            ${
              filters?.to_genesis_timestamp
                ? sql`AND i.timestamp <= ${filters.to_genesis_timestamp}::bigint / 1000`
                : sql``
            }
            ${
              filters?.from_sat_ordinal
                ? sql`AND i.ordinal_number >= ${filters.from_sat_ordinal}`
                : sql``
            }
            ${
              filters?.to_sat_ordinal
                ? sql`AND i.ordinal_number <= ${filters.to_sat_ordinal}`
                : sql``
            }
            ${filters?.number?.length ? sql`AND i.number IN ${sql(filters.number)}` : sql``}
            ${
              filters?.from_number !== undefined
                ? sql`AND i.number >= ${filters.from_number}`
                : sql``
            }
            ${filters?.to_number !== undefined ? sql`AND i.number <= ${filters.to_number}` : sql``}
            ${filters?.address?.length ? sql`AND cur.address IN ${sql(filters.address)}` : sql``}
            ${
              filters?.mime_type?.length ? sql`AND i.mime_type IN ${sql(filters.mime_type)}` : sql``
            }
            ${filters?.output ? sql`AND cur.output = ${filters.output}` : sql``}
            ${filters?.sat_rarity?.length ? sql`AND s.rarity IN ${sql(filters.sat_rarity)}` : sql``}
            ${filters?.sat_ordinal ? sql`AND i.ordinal_number = ${filters.sat_ordinal}` : sql``}
            ${
              filters?.recursive !== undefined ? sql`AND i.recursive = ${filters.recursive}` : sql``
            }
            ${filters?.cursed === true ? sql`AND i.number < 0` : sql``}
            ${filters?.cursed === false ? sql`AND i.number >= 0` : sql``}
            ${
              filters?.genesis_address?.length
                ? sql`AND i.address IN ${sql(filters.genesis_address)}`
                : sql``
            }
          ORDER BY ${orderBy} LIMIT ${page.limit} OFFSET ${page.offset}
        )
        SELECT
          r.*,
          gen_l.block_hash AS genesis_block_hash,
          gen_l.tx_id AS genesis_tx_id,
          cur_l.tx_id,
          cur_l.output,
          cur_l.offset,
          cur_l.timestamp,
          cur_l.value
        FROM results AS r
        INNER JOIN locations AS cur_l ON cur_l.ordinal_number = r.sat_ordinal AND cur_l.block_height = r.block_height AND cur_l.tx_index = r.tx_index
        INNER JOIN locations AS gen_l ON gen_l.ordinal_number = r.sat_ordinal AND gen_l.block_height = r.genesis_block_height AND gen_l.tx_index = r.genesis_tx_index
        ORDER BY r.row_num ASC
      `;
      return {
        total: total ?? results[0]?.total ?? 0,
        results: results ?? [],
      };
    });
  }

  async getInscriptionLocations(
    args: InscriptionIdentifier & { limit: number; offset: number }
  ): Promise<DbPaginatedResult<DbLocation>> {
    const results = await this.sql<({ total: number } & DbLocation)[]>`
      SELECT l.*, COUNT(*) OVER() as total
      FROM locations AS l
      INNER JOIN inscriptions AS i ON i.ordinal_number = l.ordinal_number
      WHERE ${
        'number' in args
          ? this.sql`i.number = ${args.number}`
          : this.sql`i.inscription_id = ${args.genesis_id}`
      }
      AND (
        (l.block_height > i.block_height)
        OR (l.block_height = i.block_height AND l.tx_index >= i.tx_index)
      )
      ORDER BY l.block_height DESC, l.tx_index DESC
      LIMIT ${args.limit}
      OFFSET ${args.offset}
    `;
    return {
      total: results[0]?.total ?? 0,
      results: results ?? [],
    };
  }

  async getTransfersPerBlock(
    args: { block_height?: number; block_hash?: string } & DbInscriptionIndexPaging
  ): Promise<DbPaginatedResult<DbInscriptionLocationChange>> {
    const results = await this.sql<({ total: number } & DbInscriptionLocationChange)[]>`
      WITH transfer_total AS (
        SELECT MAX(block_transfer_index) AS total FROM inscription_transfers WHERE ${
          'block_height' in args
            ? this.sql`block_height = ${args.block_height}`
            : this.sql`block_hash = ${args.block_hash}`
        }
      ),
      transfer_data AS (
        SELECT
          t.number,
          t.inscription_id AS genesis_id,
          t.ordinal_number,
          t.block_height,
          t.tx_index,
          t.block_transfer_index,
          (
            SELECT l.block_height || ',' || l.tx_index
            FROM locations AS l
            WHERE l.ordinal_number = t.ordinal_number AND (
              l.block_height < t.block_height OR
              (l.block_height = t.block_height AND l.tx_index < t.tx_index)
            )
            ORDER BY l.block_height DESC, l.tx_index DESC
            LIMIT 1
          ) AS from_data
        FROM inscription_transfers AS t
        WHERE
          ${
            'block_height' in args
              ? this.sql`t.block_height = ${args.block_height}`
              : this.sql`t.block_hash = ${args.block_hash}`
          }
          AND t.block_transfer_index <= ((SELECT total FROM transfer_total) - ${args.offset}::int)
          AND t.block_transfer_index >
            ((SELECT total FROM transfer_total) - (${args.offset}::int + ${args.limit}::int))
      )
      SELECT
        td.genesis_id,
        td.number,
        lf.block_height AS from_block_height,
        lf.block_hash AS from_block_hash,
        lf.tx_id AS from_tx_id,
        lf.address AS from_address,
        lf.output AS from_output,
        lf.offset AS from_offset,
        lf.value AS from_value,
        lf.timestamp AS from_timestamp,
        lt.block_height AS to_block_height,
        lt.block_hash AS to_block_hash,
        lt.tx_id AS to_tx_id,
        lt.address AS to_address,
        lt.output AS to_output,
        lt.offset AS to_offset,
        lt.value AS to_value,
        lt.timestamp AS to_timestamp,
        (SELECT total FROM transfer_total) + 1 AS total
      FROM transfer_data AS td
      INNER JOIN locations AS lf ON td.ordinal_number = lf.ordinal_number AND lf.block_height = SPLIT_PART(td.from_data, ',', 1)::int AND lf.tx_index = SPLIT_PART(td.from_data, ',', 2)::int
      INNER JOIN locations AS lt ON td.ordinal_number = lt.ordinal_number AND td.block_height = lt.block_height AND td.tx_index = lt.tx_index
      ORDER BY td.block_height DESC, td.block_transfer_index DESC
    `;
    return {
      total: results[0]?.total ?? 0,
      results: results ?? [],
    };
  }
}
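A sketch of a paginated index query against the store (filter values illustrative):

const db = await PgStore.connect();
const page = await db.getInscriptions(
  { limit: 20, offset: 0 },
  { mime_type: ['image/png'] },
  { order_by: OrderBy.genesis_block_height, order: Order.asc }
);
// page.total comes from the counts tables when the filters allow a shortcut,
// otherwise from COUNT(*) OVER() inside the query itself.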
135
api/ordinals/src/pg/types.ts
Normal file
@@ -0,0 +1,135 @@
import { Order, OrderBy } from '../api/schemas';
import { SatoshiRarity } from '../api/util/ordinal-satoshi';

export type DbPaginatedResult<T> = {
  total: number;
  results: T[];
};

export type DbFullyLocatedInscriptionResult = {
  genesis_id: string;
  genesis_block_height: string;
  genesis_block_hash: string;
  genesis_tx_id: string;
  genesis_fee: bigint;
  genesis_timestamp: number;
  genesis_address: string;
  number: string;
  address: string | null;
  tx_id: string;
  tx_index: number;
  output: string;
  offset: string | null;
  value: string | null;
  sat_ordinal: string;
  sat_rarity: string;
  sat_coinbase_height: string;
  mime_type: string;
  content_type: string;
  content_length: string;
  timestamp: number;
  curse_type: string | null;
  recursive: boolean;
  recursion_refs: string | null;
  parent: string | null;
  metadata: string | null;
  input_index: number;
  pointer: number | null;
  metaprotocol: string | null;
  delegate: string | null;
};

export type DbLocation = {
  genesis_id: string;
  block_height: string;
  block_hash: string;
  tx_id: string;
  tx_index: number;
  address: string;
  output: string;
  offset: string | null;
  prev_output: string | null;
  prev_offset: string | null;
  value: string | null;
  timestamp: number;
};

export type DbInscriptionLocationChange = {
  genesis_id: string;
  number: string;
  from_block_height: string;
  from_block_hash: string;
  from_tx_id: string;
  from_address: string;
  from_output: string;
  from_offset: string | null;
  from_value: string | null;
  from_timestamp: number;
  to_block_height: string;
  to_block_hash: string;
  to_tx_id: string;
  to_address: string;
  to_output: string;
  to_offset: string | null;
  to_value: string | null;
  to_timestamp: number;
};

export type DbInscriptionContent = {
  content_type: string;
  content_length: string;
  content: string;
};

export type DbInscriptionIndexPaging = {
  limit: number;
  offset: number;
};

export type DbInscriptionIndexFilters = {
  genesis_id?: string[];
  genesis_block_height?: number;
  from_genesis_block_height?: number;
  to_genesis_block_height?: number;
  genesis_block_hash?: string;
  from_genesis_timestamp?: number;
  to_genesis_timestamp?: number;
  from_sat_coinbase_height?: number;
  to_sat_coinbase_height?: number;
  number?: number[];
  from_number?: number;
  to_number?: number;
  address?: string[];
  genesis_address?: string[];
  mime_type?: string[];
  output?: string;
  sat_rarity?: SatoshiRarity[];
  sat_ordinal?: bigint;
  from_sat_ordinal?: bigint;
  to_sat_ordinal?: bigint;
  recursive?: boolean;
  cursed?: boolean;
};

export type DbInscriptionIndexOrder = {
  order_by?: OrderBy;
  order?: Order;
};

export enum DbInscriptionType {
  blessed = 'blessed',
  cursed = 'cursed',
}

export type DbInscriptionCountPerBlockFilters = {
  from_block_height?: number;
  to_block_height?: number;
};
export type DbInscriptionCountPerBlock = {
|
||||
block_height: string;
|
||||
block_hash: string;
|
||||
inscription_count: string;
|
||||
inscription_count_accum: string;
|
||||
timestamp: number;
|
||||
};
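
Most numeric columns above are typed as `string`, presumably because Postgres `bigint`/`numeric` values can exceed JavaScript's safe integer range. A hypothetical helper (not part of this diff) showing how a `DbPaginatedResult` would typically be reshaped into the paginated envelope the endpoints return:

// Hypothetical sketch: wrap a DbPaginatedResult<T> in a
// { limit, offset, total, results } page, mapping DB rows to API shapes.
import { DbPaginatedResult } from './types';

function toApiPage<T, U>(
  page: DbPaginatedResult<T>,
  limit: number,
  offset: number,
  map: (row: T) => U
) {
  return { limit, offset, total: page.total, results: page.results.map(map) };
}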
442
api/ordinals/tests/api/cache.test.ts
Normal file
@@ -0,0 +1,442 @@
import { buildApiServer } from '../../src/api/init';
import { Brc20PgStore } from '../../src/pg/brc20/brc20-pg-store';
import { PgStore } from '../../src/pg/pg-store';
import {
  BRC20_MIGRATIONS_DIR,
  ORDINALS_MIGRATIONS_DIR,
  TestFastifyServer,
  clearDb,
  inscriptionReveal,
  inscriptionTransfer,
  randomHash,
  runMigrations,
} from '../helpers';

describe('ETag cache', () => {
  let db: PgStore;
  let brc20Db: Brc20PgStore;
  let fastify: TestFastifyServer;

  beforeEach(async () => {
    db = await PgStore.connect();
    await runMigrations(db.sql, ORDINALS_MIGRATIONS_DIR);
    brc20Db = await Brc20PgStore.connect();
    await runMigrations(brc20Db.sql, BRC20_MIGRATIONS_DIR);
    fastify = await buildApiServer({ db, brc20Db });
  });

  afterEach(async () => {
    await fastify.close();
    await clearDb(db.sql);
    await db.close();
    await clearDb(brc20Db.sql);
    await brc20Db.close();
  });

  test('inscription cache control', async () => {
    await inscriptionReveal(db.sql, {
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      ordinal_number: '257418248345364',
      number: '0',
      classic_number: '0',
      block_height: '775617',
      block_hash: '000000000000000000016bcbcc915c68bce367e18f09d0945dc6aacc0ee20121',
      tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
      tx_index: 0,
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      mime_type: 'image/png',
      content_type: 'image/png',
      content_length: 5,
      content: '0x48656C6C6F',
      fee: '2805',
      curse_type: null,
      recursive: false,
      input_index: 0,
      pointer: null,
      metadata: null,
      metaprotocol: null,
      parent: null,
      delegate: null,
      timestamp: 10000,
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      offset: '0',
      prev_output: null,
      prev_offset: null,
      value: '10000',
      transfer_type: 'transferred',
      rarity: 'common',
      coinbase_height: '9000',
    });
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers.etag).not.toBeUndefined();
    const etag1 = response.headers.etag;

    // Check on numbered id too
    const nResponse = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/0',
    });
    expect(nResponse.statusCode).toBe(200);
    expect(nResponse.headers.etag).not.toBeUndefined();
    const nEtag = nResponse.headers.etag;
    expect(nEtag).toBe(etag1);

    // Cached response
    const cached = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      headers: { 'if-none-match': etag1 },
    });
    expect(cached.statusCode).toBe(304);
    const nCached = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/0',
      headers: { 'if-none-match': etag1 },
    });
    expect(nCached.statusCode).toBe(304);

    // Perform transfer and check cache
    await inscriptionTransfer(db.sql, {
      ordinal_number: '257418248345364',
      block_height: '775618',
      tx_index: 0,
      tx_id: 'bdda0d240132bab2af7f797d1507beb1acab6ad43e2c0ef7f96291aea5cc3444',
      block_hash: '00000000000000000000a9db2c5d6c5445e7191927d6981ec580ed3c8112e342',
      address: 'bc1p3xqwzmddceqrd6x9yxplqzkl5vucta2gqm5szpkmpuvcvgs7g8psjf8htd',
      output: 'bdda0d240132bab2af7f797d1507beb1acab6ad43e2c0ef7f96291aea5cc3444:0',
      offset: '0',
      prev_output: 'da2da520f055e9fadaf1a78b3e01bc53596dcbb88e9c9f53bcb61b98310b1006:0',
      prev_offset: '0',
      value: '8000',
      transfer_type: 'transferred',
      timestamp: 10001,
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      number: '0',
      from_block_height: '775617',
      from_tx_index: 0,
      block_transfer_index: 0,
    });
    const cached2 = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      headers: { 'if-none-match': etag1 },
    });
    expect(cached2.statusCode).toBe(200);
    const nCached2 = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/0',
      headers: { 'if-none-match': etag1 },
    });
    expect(nCached2.statusCode).toBe(200);
  });

  test('inscriptions index cache control', async () => {
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'text/plain',
      content_length: 5,
      number: '0',
      classic_number: '0',
      fee: '705',
      inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0',
      value: '10000',
      address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj',
      ordinal_number: '257418248345364',
      coinbase_height: '650000',
      offset: '0',
      output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '778575',
      block_hash: '000000000000000000016bcbcc915c68bce367e18f09d0945dc6aacc0ee20121',
      tx_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201',
      mime_type: 'text/plain',
      recursive: false,
      timestamp: 10000,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'image/png',
      content_length: 5,
      number: '1',
      classic_number: '1',
      fee: '2805',
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      value: '10000',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      ordinal_number: '1676913207',
      coinbase_height: '650000',
      offset: '0',
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '778576',
      block_hash: '00000000000000000000a9db2c5d6c5445e7191927d6981ec580ed3c8112e342',
      tx_id: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',
      mime_type: 'image/png',
      recursive: false,
      timestamp: 91827390,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });

    // ETag response
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions',
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers.etag).not.toBeUndefined();
    const etag = response.headers.etag;

    // Cached
    const cached = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions',
      headers: { 'if-none-match': etag },
    });
    expect(cached.statusCode).toBe(304);

    // New location
    await inscriptionTransfer(db.sql, {
      ordinal_number: '257418248345364',
      block_height: '778577',
      tx_index: 0,
      tx_id: 'ae9d273a10e899f0d2cad47ee2b0e77ab8a9addd9dd5bb5e4b03d6971c060d52',
      block_hash: 'ae9d273a10e899f0d2cad47ee2b0e77ab8a9addd9dd5bb5e4b03d6971c060d52',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      output: 'ae9d273a10e899f0d2cad47ee2b0e77ab8a9addd9dd5bb5e4b03d6971c060d52:0',
      offset: '0',
      prev_output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      prev_offset: '0',
      value: '100',
      transfer_type: 'transferred',
      timestamp: 1010101010,
      inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0',
      number: '0',
      from_block_height: '778575',
      from_tx_index: 0,
      block_transfer_index: 0,
    });
    const cached2 = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions',
      headers: { 'if-none-match': etag },
    });
    expect(cached2.statusCode).toBe(200);
  });

  test('inscriptions stats per block cache control', async () => {
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'text/plain',
      content_length: 5,
      number: '0',
      classic_number: '0',
      fee: '705',
      inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0',
      value: '10000',
      address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj',
      ordinal_number: '257418248345364',
      coinbase_height: '650000',
      offset: '0',
      output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '778575',
      block_hash: randomHash(),
      tx_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201',
      mime_type: 'text/plain',
      recursive: false,
      timestamp: 817263,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });

    // ETag response
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/stats/inscriptions',
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers.etag).not.toBeUndefined();
    const etag = response.headers.etag;

    // Cached
    const cached = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/stats/inscriptions',
      headers: { 'if-none-match': etag },
    });
    expect(cached.statusCode).toBe(304);

    // New block
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'image/png',
      content_length: 5,
      number: '1',
      classic_number: '1',
      fee: '2805',
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      value: '10000',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      ordinal_number: '1676913207',
      coinbase_height: '650000',
      offset: '0',
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '778576',
      block_hash: randomHash(),
      tx_id: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',
      mime_type: 'image/png',
      recursive: false,
      timestamp: 9812673817263,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });

    // Cache busted
    const cacheBusted = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/stats/inscriptions',
      headers: { 'if-none-match': etag },
    });
    expect(cacheBusted.statusCode).toBe(200);
  });

  test('status etag changes with new block', async () => {
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'text/plain',
      content_length: 5,
      number: '0',
      classic_number: '0',
      fee: '705',
      inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0',
      value: '10000',
      address: 'bc1pscktlmn99gyzlvymvrezh6vwd0l4kg06tg5rvssw0czg8873gz5sdkteqj',
      ordinal_number: '257418248345364',
      coinbase_height: '650000',
      offset: '0',
      output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '778575',
      block_hash: randomHash(),
      tx_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201',
      mime_type: 'text/plain',
      recursive: false,
      timestamp: 817263,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });

    // ETag response
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/',
    });
    expect(response.statusCode).toBe(200);
    expect(response.headers.etag).not.toBeUndefined();
    const etag = response.headers.etag;

    // Cached
    const cached = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/',
      headers: { 'if-none-match': etag },
    });
    expect(cached.statusCode).toBe(304);

    // New block
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'image/png',
      content_length: 5,
      number: '1',
      classic_number: '1',
      fee: '2805',
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      value: '10000',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      ordinal_number: '1676913207',
      coinbase_height: '650000',
      offset: '0',
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '778576',
      block_hash: randomHash(),
      tx_id: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',
      mime_type: 'image/png',
      recursive: false,
      timestamp: 981739781273,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });

    // Cache busted
    const cacheBusted = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/',
      headers: { 'if-none-match': etag },
    });
    expect(cacheBusted.statusCode).toBe(200);
  });
});
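
The round-trip these tests exercise is plain HTTP validation caching: the client echoes the `etag` back via `If-None-Match` and keeps getting 304 until the underlying data changes. From a client's perspective (a minimal sketch; the host is hypothetical):

// Sketch of a client using the ETag validation the suite above asserts,
// assuming an API reachable at http://localhost:3000 (hypothetical).
async function fetchWithEtag(url: string, etag?: string) {
  const res = await fetch(url, { headers: etag ? { 'If-None-Match': etag } : {} });
  if (res.status === 304) return { fresh: false, etag }; // cached copy still valid
  return { fresh: true, etag: res.headers.get('etag') ?? undefined, body: await res.json() };
}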
3683
api/ordinals/tests/api/inscriptions.test.ts
Normal file
File diff suppressed because it is too large
87
api/ordinals/tests/api/ordinal-satoshi.test.ts
Normal file
@@ -0,0 +1,87 @@
import { OrdinalSatoshi, SatoshiRarity } from '../../src/api/util/ordinal-satoshi';

describe('OrdinalSatoshi', () => {
  test('mythic sat', () => {
    const sat = new OrdinalSatoshi(0);
    expect(sat.rarity).toBe(SatoshiRarity.mythic);
    expect(sat.degree).toBe('0°0′0″0‴');
    expect(sat.decimal).toBe('0.0');
    expect(sat.cycle).toBe(0);
    expect(sat.epoch).toBe(0);
    expect(sat.name).toBe('nvtdijuwxlp');
    expect(sat.offset).toBe(0);
    expect(sat.percentile).toBe('0%');
    expect(sat.period).toBe(0);
    expect(sat.blockHeight).toBe(0);
  });

  test('legendary sat', () => {
    const sat = new OrdinalSatoshi(2067187500000000);
    expect(sat.rarity).toBe(SatoshiRarity.legendary);
    expect(sat.degree).toBe('1°0′0″0‴');
    expect(sat.decimal).toBe('1260000.0');
    expect(sat.cycle).toBe(1);
    expect(sat.epoch).toBe(6);
    expect(sat.name).toBe('fachfvytgb');
    expect(sat.offset).toBe(0);
    expect(sat.percentile).toBe('98.4375001082813%');
    expect(sat.period).toBe(625);
    expect(sat.blockHeight).toBe(1260000);
  });

  test('epic sat', () => {
    const sat = new OrdinalSatoshi(1050000000000000);
    expect(sat.rarity).toBe(SatoshiRarity.epic);
    expect(sat.degree).toBe('0°0′336″0‴');
    expect(sat.decimal).toBe('210000.0');
    expect(sat.cycle).toBe(0);
    expect(sat.epoch).toBe(1);
    expect(sat.name).toBe('gkjbdrhkfqf');
    expect(sat.offset).toBe(0);
    expect(sat.percentile).toBe('50.00000005500003%');
    expect(sat.period).toBe(104);
    expect(sat.blockHeight).toBe(210000);
  });

  test('rare sat', () => {
    const sat = new OrdinalSatoshi(10080000000000);
    expect(sat.rarity).toBe(SatoshiRarity.rare);
    expect(sat.degree).toBe('0°2016′0″0‴');
    expect(sat.decimal).toBe('2016.0');
    expect(sat.cycle).toBe(0);
    expect(sat.epoch).toBe(0);
    expect(sat.name).toBe('ntwwidfrzxh');
    expect(sat.offset).toBe(0);
    expect(sat.percentile).toBe('0.48000000052800024%');
    expect(sat.period).toBe(1);
    expect(sat.blockHeight).toBe(2016);
  });

  test('uncommon sat', () => {
    const sat = new OrdinalSatoshi(5000000000);
    expect(sat.rarity).toBe(SatoshiRarity.uncommon);
    expect(sat.degree).toBe('0°1′1″0‴');
    expect(sat.decimal).toBe('1.0');
    expect(sat.cycle).toBe(0);
    expect(sat.epoch).toBe(0);
    expect(sat.name).toBe('nvtcsezkbth');
    expect(sat.offset).toBe(0);
    expect(sat.percentile).toBe('0.00023809523835714296%');
    expect(sat.period).toBe(0);
    expect(sat.blockHeight).toBe(1);
  });

  test('common sat', () => {
    const sat = new OrdinalSatoshi(200);
    expect(sat.rarity).toBe(SatoshiRarity.common);
    expect(sat.degree).toBe('0°0′0″200‴');
    expect(sat.decimal).toBe('0.200');
    expect(sat.cycle).toBe(0);
    expect(sat.epoch).toBe(0);
    expect(sat.name).toBe('nvtdijuwxdx');
    expect(sat.offset).toBe(200);
    expect(sat.percentile).toBe('0.000000000009523809534285719%');
    expect(sat.period).toBe(0);
    expect(sat.blockHeight).toBe(0);
  });
});
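
The expectations above all follow from ordinal theory's subsidy math. As a back-of-envelope check (a sketch assuming the standard parameters, 50 BTC initial subsidy halving every 210,000 blocks; this is not the class's actual implementation):

// Sketch: map a sat number to the height of the block whose coinbase created it.
function coinbaseHeight(sat: bigint): number {
  let height = 0n;
  let subsidy = 50n * 100_000_000n; // sats per block in epoch 0
  let remaining = sat;
  while (remaining >= subsidy * 210_000n) {
    remaining -= subsidy * 210_000n; // skip a whole halving epoch
    height += 210_000n;
    subsidy /= 2n;
  }
  return Number(height + remaining / subsidy);
}

console.log(coinbaseHeight(5_000_000_000n)); // 1 (the uncommon sat above)
console.log(coinbaseHeight(10_080_000_000_000n)); // 2016 (the rare sat above)
console.log(coinbaseHeight(2_067_187_500_000_000n)); // 1260000 (the legendary sat above)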
327
api/ordinals/tests/api/sats.test.ts
Normal file
@@ -0,0 +1,327 @@
import { buildApiServer } from '../../src/api/init';
import { PgStore } from '../../src/pg/pg-store';
import { Brc20PgStore } from '../../src/pg/brc20/brc20-pg-store';
import {
  BRC20_MIGRATIONS_DIR,
  clearDb,
  inscriptionReveal,
  inscriptionTransfer,
  ORDINALS_MIGRATIONS_DIR,
  runMigrations,
  TestFastifyServer,
} from '../helpers';

describe('/sats', () => {
  let db: PgStore;
  let brc20Db: Brc20PgStore;
  let fastify: TestFastifyServer;

  beforeEach(async () => {
    db = await PgStore.connect();
    await runMigrations(db.sql, ORDINALS_MIGRATIONS_DIR);
    brc20Db = await Brc20PgStore.connect();
    await runMigrations(brc20Db.sql, BRC20_MIGRATIONS_DIR);
    fastify = await buildApiServer({ db, brc20Db });
  });

  afterEach(async () => {
    await fastify.close();
    await clearDb(db.sql);
    await db.close();
    await clearDb(brc20Db.sql);
    await brc20Db.close();
  });

  test('returns valid sat', async () => {
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/sats/10080000000001',
    });
    expect(response.statusCode).toBe(200);
    expect(response.json()).toStrictEqual({
      coinbase_height: 2016,
      cycle: 0,
      decimal: '2016.1',
      degree: '0°2016′0″1‴',
      epoch: 0,
      name: 'ntwwidfrzxg',
      offset: 1,
      percentile: '0.48000000052804787%',
      period: 1,
      rarity: 'common',
    });
  });

  test('returns sat with inscription', async () => {
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'image/png',
      content_length: 5,
      number: '0',
      classic_number: '0',
      fee: '2805',
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      value: '10000',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      ordinal_number: '257418248345364',
      coinbase_height: '650000',
      offset: '0',
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      input_index: 0,
      tx_index: 0,
      curse_type: null,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '775617',
      block_hash: '163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88',
      tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
      mime_type: 'image/png',
      recursive: false,
      timestamp: 1676913207,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/sats/257418248345364',
    });
    expect(response.statusCode).toBe(200);
    expect(response.json().inscription_id).toBe(
      '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0'
    );
  });

  test('returns sat with more than 1 inscription', async () => {
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'image/png',
      content_length: 5,
      number: '-7',
      classic_number: '-7',
      fee: '2805',
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      value: '10000',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      ordinal_number: '257418248345364',
      coinbase_height: '650000',
      offset: '0',
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      curse_type: 'p2wsh',
      input_index: 0,
      tx_index: 0,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '775617',
      block_hash: '163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88',
      tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
      mime_type: 'image/png',
      recursive: false,
      timestamp: 1676913207,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });
    await inscriptionReveal(db.sql, {
      content: '0x48656C6C6F',
      content_type: 'image/png',
      content_length: 5,
      number: '-1',
      classic_number: '-1',
      fee: '2805',
      inscription_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993i0',
      value: '10000',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      // Same sat. This will also create a transfer for the previous inscription.
      ordinal_number: '257418248345364',
      coinbase_height: '650000',
      offset: '0',
      output: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0',
      curse_type: 'p2wsh',
      input_index: 0,
      tx_index: 0,
      pointer: null,
      delegate: null,
      metaprotocol: null,
      metadata: null,
      parent: null,
      block_height: '775618',
      block_hash: '000000000000000000002a244dc7dfcf8ab85e42d182531c27197fc125086f19',
      tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993',
      mime_type: 'image/png',
      recursive: false,
      timestamp: 1676913207,
      prev_output: null,
      prev_offset: null,
      transfer_type: 'transferred',
      rarity: 'common',
    });
    // Simulate the inscription transfer for -7
    await inscriptionTransfer(db.sql, {
      ordinal_number: '257418248345364',
      block_height: '775618',
      tx_index: 0,
      tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993',
      block_hash: '000000000000000000002a244dc7dfcf8ab85e42d182531c27197fc125086f19',
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      output: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0',
      offset: '0',
      prev_output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      prev_offset: '0',
      value: '10000',
      transfer_type: 'transferred',
      timestamp: 1676913207,
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      number: '-7',
      from_block_height: '775617',
      from_tx_index: 0,
      block_transfer_index: 0,
    });
    const response = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/sats/257418248345364/inscriptions',
    });
    expect(response.statusCode).toBe(200);
    const json = response.json();
    expect(json.total).toBe(2);
    expect(json.results).toStrictEqual([
      {
        address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
        content_length: 5,
        content_type: 'image/png',
        genesis_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
        genesis_block_hash: '000000000000000000002a244dc7dfcf8ab85e42d182531c27197fc125086f19',
        genesis_block_height: 775618,
        genesis_fee: '2805',
        genesis_timestamp: 1676913207000,
        genesis_tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993',
        id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993i0',
        location: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0',
        mime_type: 'image/png',
        number: -1,
        offset: '0',
        output: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0',
        sat_coinbase_height: 650000,
        sat_ordinal: '257418248345364',
        sat_rarity: 'common',
        timestamp: 1676913207000,
        tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993',
        value: '10000',
        curse_type: 'p2wsh',
        recursive: false,
        recursion_refs: null,
        parent: null,
        metadata: null,
        meta_protocol: null,
        delegate: null,
      },
      {
        address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
        content_length: 5,
        content_type: 'image/png',
        genesis_address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
        genesis_block_hash: '163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88',
        genesis_block_height: 775617,
        genesis_fee: '2805',
        genesis_timestamp: 1676913207000,
        genesis_tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
        id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
        // Re-inscribed sat is moved to the latest inscription's location.
        location: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0',
        mime_type: 'image/png',
        number: -7,
        offset: '0',
        output: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0',
        sat_coinbase_height: 650000,
        sat_ordinal: '257418248345364',
        sat_rarity: 'common',
        timestamp: 1676913207000,
        tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993',
        value: '10000',
        curse_type: 'p2wsh',
        recursive: false,
        recursion_refs: null,
        parent: null,
        metadata: null,
        meta_protocol: null,
        delegate: null,
      },
    ]);

    // Inscription -7 should have 2 locations, -1 should only have 1.
    let transfersResponse = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/-7/transfers',
    });
    expect(transfersResponse.statusCode).toBe(200);
    let transferJson = transfersResponse.json();
    expect(transferJson.total).toBe(2);
    expect(transferJson.results).toHaveLength(2);
    expect(transferJson.results[0].location).toBe(
      'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0'
    );
    expect(transferJson.results[1].location).toBe(
      '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0:0'
    );

    transfersResponse = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/-1/transfers',
    });
    expect(transfersResponse.statusCode).toBe(200);
    transferJson = transfersResponse.json();
    expect(transferJson.total).toBe(1);
    expect(transferJson.results).toHaveLength(1);
    expect(transferJson.results[0].location).toBe(
      'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993:0:0'
    );

    // Block transfer activity should reflect all true transfers.
    transfersResponse = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/transfers?block=775617',
    });
    expect(transfersResponse.statusCode).toBe(200);
    transferJson = transfersResponse.json();
    expect(transferJson.total).toBe(0);
    expect(transferJson.results).toHaveLength(0);

    transfersResponse = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/inscriptions/transfers?block=775618',
    });
    expect(transfersResponse.statusCode).toBe(200);
    transferJson = transfersResponse.json();
    expect(transferJson.total).toBe(1);
    expect(transferJson.results).toHaveLength(1);
    expect(transferJson.results[0].number).toBe(-7);
  });

  test('returns not found on invalid sats', async () => {
    const response1 = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/sats/2099999997690000',
    });
    expect(response1.statusCode).toBe(400);

    const response2 = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/sats/-1',
    });
    expect(response2.statusCode).toBe(400);

    const response3 = await fastify.inject({
      method: 'GET',
      url: '/ordinals/v1/sats/Infinity',
    });
    expect(response3.statusCode).toBe(400);
  });
});
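
One subtlety exercised above: a reveal is not a transfer, but revealing a new inscription on an already-inscribed sat drags every earlier inscription on that sat to the new location. That is why block 775617 (the reveal of -7) reports zero transfer activity while block 775618 reports exactly one, for -7. In miniature (a sketch of the counting rule only, not the indexer's implementation):

// Sketch: only movements of pre-existing inscriptions count as block transfers.
type SatEvent = { inscription: number; block: number; kind: 'reveal' | 'move' };

const events: SatEvent[] = [
  { inscription: -7, block: 775617, kind: 'reveal' },
  { inscription: -1, block: 775618, kind: 'reveal' }, // re-inscription of the same sat
  { inscription: -7, block: 775618, kind: 'move' },   // -7 dragged to the new output
];

const transfersIn = (block: number) =>
  events.filter(e => e.block === block && e.kind === 'move').length;

console.log(transfersIn(775617)); // 0
console.log(transfersIn(775618)); // 1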
248
api/ordinals/tests/api/stats.test.ts
Normal file
@@ -0,0 +1,248 @@
import { buildApiServer } from '../../src/api/init';
import { Brc20PgStore } from '../../src/pg/brc20/brc20-pg-store';
import { PgStore } from '../../src/pg/pg-store';
import {
  TestFastifyServer,
  ORDINALS_MIGRATIONS_DIR,
  BRC20_MIGRATIONS_DIR,
  clearDb,
  runMigrations,
  insertTestCountsByBlock,
} from '../helpers';

describe('/stats', () => {
  let db: PgStore;
  let brc20Db: Brc20PgStore;
  let fastify: TestFastifyServer;

  beforeEach(async () => {
    db = await PgStore.connect();
    await runMigrations(db.sql, ORDINALS_MIGRATIONS_DIR);
    brc20Db = await Brc20PgStore.connect();
    await runMigrations(brc20Db.sql, BRC20_MIGRATIONS_DIR);
    fastify = await buildApiServer({ db, brc20Db });
  });

  afterEach(async () => {
    await fastify.close();
    await clearDb(db.sql);
    await db.close();
    await clearDb(brc20Db.sql);
    await brc20Db.close();
  });

  describe('/stats/inscriptions', () => {
    const bh = '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d';
    const ts = 1676913207000;

    describe('event processing', () => {
      const EXPECTED = {
        results: [
          {
            block_hash: bh,
            block_height: '778010',
            inscription_count: '3',
            inscription_count_accum: '9',
            timestamp: ts,
          },
          {
            block_hash: bh,
            block_height: '778005',
            inscription_count: '2',
            inscription_count_accum: '6',
            timestamp: ts,
          },
          {
            block_hash: bh,
            block_height: '778002',
            inscription_count: '1',
            inscription_count_accum: '4',
            timestamp: ts,
          },
          {
            block_hash: bh,
            block_height: '778001',
            inscription_count: '1',
            inscription_count_accum: '3',
            timestamp: ts,
          },
          {
            block_hash: bh,
            block_height: '778000',
            inscription_count: '2',
            inscription_count_accum: '2',
            timestamp: ts,
          },
        ],
      };

      test('returns stats when processing blocks in order', async () => {
        await insertTestCountsByBlock(db.sql, {
          block_height: '778000',
          block_hash: bh,
          inscription_count: 2,
          inscription_count_accum: 2,
          timestamp: ts,
        });
        await insertTestCountsByBlock(db.sql, {
          block_height: '778001',
          block_hash: bh,
          inscription_count: 1,
          inscription_count_accum: 3,
          timestamp: ts,
        });
        await insertTestCountsByBlock(db.sql, {
          block_height: '778002',
          block_hash: bh,
          inscription_count: 1,
          inscription_count_accum: 4,
          timestamp: ts,
        });
        await insertTestCountsByBlock(db.sql, {
          block_height: '778005',
          block_hash: bh,
          inscription_count: 2,
          inscription_count_accum: 6,
          timestamp: ts,
        });
        await insertTestCountsByBlock(db.sql, {
          block_height: '778010',
          block_hash: bh,
          inscription_count: 3,
          inscription_count_accum: 9,
          timestamp: ts,
        });

        const response = await fastify.inject({
          method: 'GET',
          url: '/ordinals/v1/stats/inscriptions',
        });
        expect(response.statusCode).toBe(200);
        expect(response.json()).toStrictEqual(EXPECTED);
      });
    });

    test('range filters', async () => {
      await insertTestCountsByBlock(db.sql, {
        block_height: '778000',
        block_hash: bh,
        inscription_count: 1,
        inscription_count_accum: 1,
        timestamp: ts,
      });
      await insertTestCountsByBlock(db.sql, {
        block_height: '778001',
        block_hash: bh,
        inscription_count: 1,
        inscription_count_accum: 2,
        timestamp: ts,
      });
      await insertTestCountsByBlock(db.sql, {
        block_height: '778002',
        block_hash: bh,
        inscription_count: 1,
        inscription_count_accum: 3,
        timestamp: ts,
      });
      await insertTestCountsByBlock(db.sql, {
        block_height: '778005',
        block_hash: bh,
        inscription_count: 2,
        inscription_count_accum: 5,
        timestamp: ts,
      });
      await insertTestCountsByBlock(db.sql, {
        block_height: '778010',
        block_hash: bh,
        inscription_count: 1,
        inscription_count_accum: 6,
        timestamp: ts,
      });

      const responseFrom = await fastify.inject({
        method: 'GET',
        url: '/ordinals/v1/stats/inscriptions',
        query: { from_block_height: '778004' },
      });
      expect(responseFrom.statusCode).toBe(200);
      expect(responseFrom.json()).toStrictEqual({
        results: [
          {
            block_height: '778010',
            block_hash: bh,
            inscription_count: '1',
            inscription_count_accum: '6',
            timestamp: ts,
          },
          {
            block_height: '778005',
            block_hash: bh,
            inscription_count: '2',
            inscription_count_accum: '5',
            timestamp: ts,
          },
        ],
      });

      const responseTo = await fastify.inject({
        method: 'GET',
        url: '/ordinals/v1/stats/inscriptions',
        query: { to_block_height: '778004' },
      });
      expect(responseTo.statusCode).toBe(200);
      expect(responseTo.json()).toStrictEqual({
        results: [
          {
            block_height: '778002',
            block_hash: bh,
            inscription_count: '1',
            inscription_count_accum: '3',
            timestamp: ts,
          },
          {
            block_height: '778001',
            block_hash: bh,
            inscription_count: '1',
            inscription_count_accum: '2',
            timestamp: ts,
          },
          {
            block_height: '778000',
            block_hash: bh,
            inscription_count: '1',
            inscription_count_accum: '1',
            timestamp: ts,
          },
        ],
      });

      const responseFromTo = await fastify.inject({
        method: 'GET',
        url: '/ordinals/v1/stats/inscriptions',
        query: {
          from_block_height: '778002',
          to_block_height: '778005',
        },
      });
      expect(responseFromTo.statusCode).toBe(200);
      expect(responseFromTo.json()).toStrictEqual({
        results: [
          {
            block_height: '778005',
            block_hash: bh,
            inscription_count: '2',
            inscription_count_accum: '5',
            timestamp: ts,
          },
          {
            block_height: '778002',
            block_hash: bh,
            inscription_count: '1',
            inscription_count_accum: '3',
            timestamp: ts,
          },
        ],
      });
    });
  });
});
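
`inscription_count_accum` is a running total over ascending block heights, which the endpoint then returns newest-first. Conceptually (a minimal sketch, independent of the database):

// Sketch: derive per-block and accumulated counts from reveal heights,
// mirroring the counts_by_block rows the tests insert above.
function countsByBlock(revealHeights: number[]) {
  const perBlock = new Map<number, number>();
  for (const h of revealHeights) perBlock.set(h, (perBlock.get(h) ?? 0) + 1);
  let accum = 0;
  return [...perBlock.entries()]
    .sort(([a], [b]) => a - b)
    .map(([block_height, inscription_count]) => ({
      block_height,
      inscription_count,
      inscription_count_accum: (accum += inscription_count),
    }));
}

// countsByBlock([778000, 778000, 778001]) ->
//   [ { block_height: 778000, inscription_count: 2, inscription_count_accum: 2 },
//     { block_height: 778001, inscription_count: 1, inscription_count_accum: 3 } ]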
127
api/ordinals/tests/api/status.test.ts
Normal file
@@ -0,0 +1,127 @@
import { buildApiServer } from '../../src/api/init';
import { Brc20PgStore } from '../../src/pg/brc20/brc20-pg-store';
import { PgStore } from '../../src/pg/pg-store';
import {
  TestFastifyServer,
  ORDINALS_MIGRATIONS_DIR,
  BRC20_MIGRATIONS_DIR,
  clearDb,
  runMigrations,
  inscriptionReveal,
  updateTestChainTip,
} from '../helpers';

describe('Status', () => {
  let db: PgStore;
  let brc20Db: Brc20PgStore;
  let fastify: TestFastifyServer;

  beforeEach(async () => {
    db = await PgStore.connect();
    await runMigrations(db.sql, ORDINALS_MIGRATIONS_DIR);
    brc20Db = await Brc20PgStore.connect();
    await runMigrations(brc20Db.sql, BRC20_MIGRATIONS_DIR);
    fastify = await buildApiServer({ db, brc20Db });
  });

  afterEach(async () => {
    await fastify.close();
    await clearDb(db.sql);
    await db.close();
    await clearDb(brc20Db.sql);
    await brc20Db.close();
  });

  test('returns status when db is empty', async () => {
    const response = await fastify.inject({ method: 'GET', url: '/ordinals/v1/' });
    const json = response.json();
    expect(json).toStrictEqual({
      server_version: 'ordinals-api v0.0.1 (test:123456)',
      status: 'ready',
      block_height: 0,
    });
    const noVersionResponse = await fastify.inject({ method: 'GET', url: '/ordinals/' });
    expect(response.statusCode).toEqual(noVersionResponse.statusCode);
    expect(json).toStrictEqual(noVersionResponse.json());
  });

  test('returns inscriptions total', async () => {
    await inscriptionReveal(db.sql, {
      inscription_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
      ordinal_number: '257418248345364',
      number: '0',
      classic_number: '0',
      block_height: '775617',
      block_hash: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',
      tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
      tx_index: 0,
      address: 'bc1p3cyx5e2hgh53w7kpxcvm8s4kkega9gv5wfw7c4qxsvxl0u8x834qf0u2td',
      mime_type: 'text/plain',
      content_type: 'text/plain;charset=utf-8',
      content_length: 5,
      content: '0x48656C6C6F',
      fee: '2805',
      curse_type: null,
      recursive: false,
      input_index: 0,
      pointer: null,
      metadata: null,
      metaprotocol: null,
      parent: null,
      delegate: null,
      timestamp: 1676913207000,
      output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
      offset: '0',
      prev_output: null,
      prev_offset: null,
      value: '10000',
      transfer_type: 'transferred',
      rarity: 'common',
      coinbase_height: '650000',
    });
    await inscriptionReveal(db.sql, {
      inscription_id: 'a98d7055a77fa0b96cc31e30bb8bacf777382d1b67f1b7eca6f2014e961591c8i0',
      ordinal_number: '257418248345364',
      number: '-2',
      classic_number: '-2',
      block_height: '791975',
      block_hash: '6c3f7e89a7b6d5f4e3a2c1b09876e5d4c3b2a1908765e4d3c2b1a09f8e7d6c5b',
      tx_id: 'a98d7055a77fa0b96cc31e30bb8bacf777382d1b67f1b7eca6f2014e961591c8',
      tx_index: 0,
      address: 'bc1pk6y72s45lcaurfwxrjyg7cf9xa9ezzuc8f5hhhzhtvhe5fgygckq0t0m5f',
      mime_type: 'text/plain',
      content_type: 'text/plain;charset=utf-8',
      content_length: 5,
      content: '0x48656C6C6F',
      fee: '2805',
      curse_type: 'p2wsh',
      recursive: false,
      input_index: 0,
      pointer: null,
      metadata: null,
      metaprotocol: null,
      parent: null,
      delegate: null,
      timestamp: 1676913207000,
      output: 'a98d7055a77fa0b96cc31e30bb8bacf777382d1b67f1b7eca6f2014e961591c8:0',
      offset: '0',
      prev_output: null,
      prev_offset: null,
      value: '10000',
      transfer_type: 'transferred',
      rarity: 'common',
      coinbase_height: '650000',
    });
    await updateTestChainTip(db.sql, 791975);

    const response = await fastify.inject({ method: 'GET', url: '/ordinals/v1/' });
    const json = response.json();
    expect(json).toStrictEqual({
      server_version: 'ordinals-api v0.0.1 (test:123456)',
      status: 'ready',
      block_height: 791975,
      max_inscription_number: 0,
      max_cursed_inscription_number: -2,
    });
  });
});
1403
api/ordinals/tests/brc-20/api.test.ts
Normal file
File diff suppressed because it is too large
499
api/ordinals/tests/helpers.ts
Normal file
@@ -0,0 +1,499 @@
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { FastifyBaseLogger, FastifyInstance } from 'fastify';
import { IncomingMessage, Server, ServerResponse } from 'http';
import * as fs from 'fs';
import * as path from 'path';
import { PgSqlClient } from '@hirosystems/api-toolkit';

export const ORDINALS_MIGRATIONS_DIR = '../../migrations/ordinals';
export const BRC20_MIGRATIONS_DIR = '../../migrations/ordinals-brc20';

/// Runs SQL migrations based on the Rust `refinery` crate standard.
export async function runMigrations(sql: PgSqlClient, directory: string) {
  const files = fs.readdirSync(directory);
  const sqlFiles = files
    .filter(file => path.extname(file).toLowerCase() === '.sql')
    .map(file => path.join(directory, file))
    .sort((a, b) => {
      // Compare the numeric migration version in the file name itself. Matching on
      // the full path would pick up digits from directory names (e.g. the "20" in
      // `ordinals-brc20`) and break the ordering.
      const numA = parseInt(path.basename(a).match(/\d+/)?.[0] ?? '0', 10);
      const numB = parseInt(path.basename(b).match(/\d+/)?.[0] ?? '0', 10);
      return numA - numB;
    });
  for (const sqlFile of sqlFiles) await sql.file(sqlFile);
  return sqlFiles;
}

/// Drops all tables and types from a test DB. Equivalent to a migration rollback, which the
/// `refinery` crate does not support.
export async function clearDb(sql: PgSqlClient) {
  await sql`
    DO $$ DECLARE
      r RECORD;
    BEGIN
      FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
        EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
      END LOOP;
    END $$;
  `;
  await sql`
    DO $$ DECLARE
      r RECORD;
    BEGIN
      FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP
        EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
      END LOOP;
    END $$;
  `;
}
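
Typical harness usage of these two helpers, as seen in the suites above (a sketch; connection settings come from the environment, as with `PgStore.connect()` in the tests):

// Sketch: bring a test database to a clean, fully migrated state, run a body,
// then tear everything down again.
import { PgStore } from '../src/pg/pg-store';
import { ORDINALS_MIGRATIONS_DIR, clearDb, runMigrations } from './helpers';

async function withMigratedDb(run: (db: PgStore) => Promise<void>) {
  const db = await PgStore.connect();
  try {
    await runMigrations(db.sql, ORDINALS_MIGRATIONS_DIR);
    await run(db);
  } finally {
    await clearDb(db.sql); // refinery has no down-migrations, so drop everything
    await db.close();
  }
}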
|
||||
|
||||
export type TestFastifyServer = FastifyInstance<
|
||||
Server,
|
||||
IncomingMessage,
|
||||
ServerResponse,
|
||||
FastifyBaseLogger,
|
||||
TypeBoxTypeProvider
|
||||
>;
|
||||
|
||||
type TestOrdinalsInscriptionsRow = {
|
||||
inscription_id: string;
|
||||
ordinal_number: string;
|
||||
number: string;
|
||||
classic_number: string;
|
||||
block_height: string;
|
||||
block_hash: string;
|
||||
tx_id: string;
|
||||
tx_index: number;
|
||||
address: string | null;
|
||||
mime_type: string;
|
||||
content_type: string;
|
||||
content_length: number;
|
||||
content: string;
|
||||
fee: string;
|
||||
curse_type: string | null;
|
||||
recursive: boolean;
|
||||
input_index: number;
|
||||
pointer: string | null;
|
||||
metadata: string | null;
|
||||
metaprotocol: string | null;
|
||||
parent: string | null;
|
||||
delegate: string | null;
|
||||
timestamp: number;
|
||||
};
|
||||
async function insertTestInscription(sql: PgSqlClient, row: TestOrdinalsInscriptionsRow) {
|
||||
await sql`INSERT INTO inscriptions ${sql(row)}`;
|
||||
}
|
||||
|
||||
type TestOrdinalsLocationsRow = {
|
||||
ordinal_number: string;
|
||||
block_height: string;
|
||||
tx_index: number;
|
||||
tx_id: string;
|
||||
block_hash: string;
|
||||
address: string | null;
|
||||
output: string;
|
||||
offset: string | null;
|
||||
prev_output: string | null;
|
||||
prev_offset: string | null;
|
||||
value: string | null;
|
||||
transfer_type: string;
|
||||
timestamp: number;
|
||||
};
|
||||
async function insertTestLocation(sql: PgSqlClient, row: TestOrdinalsLocationsRow) {
|
||||
await sql`
|
||||
INSERT INTO locations ${sql(row)}
|
||||
ON CONFLICT (ordinal_number, block_height, tx_index) DO NOTHING
|
||||
`;
|
||||
}
|
||||
|
||||
type TestOrdinalsCurrentLocationsRow = {
|
||||
ordinal_number: string;
|
||||
block_height: string;
|
||||
tx_id: string;
|
||||
tx_index: number;
|
||||
address: string;
|
||||
output: string;
|
||||
offset: string | null;
|
||||
};
|
||||
async function insertTestCurrentLocation(sql: PgSqlClient, row: TestOrdinalsCurrentLocationsRow) {
|
||||
await sql`
|
||||
INSERT INTO current_locations ${sql(row)}
|
||||
ON CONFLICT (ordinal_number) DO UPDATE SET
|
||||
block_height = EXCLUDED.block_height,
|
||||
tx_id = EXCLUDED.tx_id,
|
||||
tx_index = EXCLUDED.tx_index,
|
||||
address = EXCLUDED.address,
|
||||
output = EXCLUDED.output,
|
||||
\"offset\" = EXCLUDED.\"offset\"
|
||||
`;
|
||||
}
|
||||
|
||||
type TestOrdinalsSatoshisRow = {
|
||||
ordinal_number: string;
|
||||
rarity: string;
|
||||
coinbase_height: string;
|
||||
};
|
||||
async function insertTestSatoshi(sql: PgSqlClient, row: TestOrdinalsSatoshisRow) {
|
||||
await sql`
|
||||
INSERT INTO satoshis ${sql(row)}
|
||||
ON CONFLICT (ordinal_number) DO NOTHING
|
||||
`;
|
||||
}
|
||||
|
||||
type TestOrdinalsInscriptionTransfersRow = {
|
||||
inscription_id: string;
|
||||
number: string;
|
||||
ordinal_number: string;
|
||||
block_height: string;
|
||||
tx_index: number;
|
||||
from_block_height: string;
|
||||
from_tx_index: number;
|
||||
block_transfer_index: number;
|
||||
};
|
||||
async function insertTestInscriptionTransfer(
|
||||
sql: PgSqlClient,
|
||||
row: TestOrdinalsInscriptionTransfersRow
|
||||
) {
|
||||
await sql`INSERT INTO inscription_transfers ${sql(row)}`;
|
||||
}
|
||||
|
||||
type TestOrdinalsCountsByBlockRow = {
|
||||
block_height: string;
|
||||
block_hash: string;
|
||||
inscription_count: number;
|
||||
inscription_count_accum: number;
|
||||
timestamp: number;
|
||||
};
|
||||
export async function insertTestCountsByBlock(sql: PgSqlClient, row: TestOrdinalsCountsByBlockRow) {
|
||||
await sql`
|
||||
INSERT INTO counts_by_block ${sql(row)}
|
||||
ON CONFLICT (block_height) DO UPDATE SET
|
||||
inscription_count = counts_by_block.inscription_count + EXCLUDED.inscription_count,
|
||||
inscription_count_accum = counts_by_block.inscription_count_accum + EXCLUDED.inscription_count_accum
|
||||
`;
|
||||
}
|
||||
|
||||
type TestOrdinalsInscriptionRecursionsRow = {
|
||||
inscription_id: string;
|
||||
ref_inscription_id: string;
|
||||
};
|
||||
export async function insertTestInscriptionRecursion(
|
||||
sql: PgSqlClient,
|
||||
row: TestOrdinalsInscriptionRecursionsRow
|
||||
) {
|
||||
  await sql`INSERT INTO inscription_recursions ${sql(row)}`;
}

export async function updateTestChainTip(sql: PgSqlClient, blockHeight: number) {
  await sql`UPDATE chain_tip SET block_height = ${blockHeight}`;
}

type TestOrdinalsInscriptionReveal = TestOrdinalsInscriptionsRow &
  TestOrdinalsLocationsRow &
  TestOrdinalsSatoshisRow &
  TestOrdinalsCurrentLocationsRow;
export async function inscriptionReveal(sql: PgSqlClient, reveal: TestOrdinalsInscriptionReveal) {
  await insertTestSatoshi(sql, {
    ordinal_number: reveal.ordinal_number,
    rarity: reveal.rarity,
    coinbase_height: reveal.coinbase_height,
  });
  await insertTestInscription(sql, {
    inscription_id: reveal.inscription_id,
    ordinal_number: reveal.ordinal_number,
    number: reveal.number,
    classic_number: reveal.classic_number,
    block_height: reveal.block_height,
    block_hash: reveal.block_hash,
    tx_id: reveal.tx_id,
    tx_index: reveal.tx_index,
    address: reveal.address,
    mime_type: reveal.mime_type,
    content_type: reveal.content_type,
    content_length: reveal.content_length,
    content: reveal.content,
    fee: reveal.fee,
    curse_type: reveal.curse_type,
    recursive: reveal.recursive,
    input_index: reveal.input_index,
    pointer: reveal.pointer,
    metadata: reveal.metadata,
    metaprotocol: reveal.metaprotocol,
    parent: reveal.parent,
    delegate: reveal.delegate,
    timestamp: reveal.timestamp,
  });
  await insertTestLocation(sql, {
    ordinal_number: reveal.ordinal_number,
    block_height: reveal.block_height,
    tx_index: reveal.tx_index,
    tx_id: reveal.tx_id,
    block_hash: reveal.block_hash,
    address: reveal.address,
    output: reveal.output,
    offset: reveal.offset,
    prev_output: reveal.prev_output,
    prev_offset: reveal.prev_offset,
    value: reveal.value,
    transfer_type: reveal.transfer_type,
    timestamp: reveal.timestamp,
  });
  await insertTestCurrentLocation(sql, {
    ordinal_number: reveal.ordinal_number,
    block_height: reveal.block_height,
    tx_index: reveal.tx_index,
    tx_id: reveal.tx_id,
    address: reveal.address,
    output: reveal.output,
    offset: reveal.offset,
  });
  await insertTestCountsByBlock(sql, {
    block_height: reveal.block_height,
    block_hash: reveal.block_hash,
    inscription_count: 1,
    inscription_count_accum: 1,
    timestamp: reveal.timestamp,
  });
  await sql`
    INSERT INTO counts_by_mime_type ${sql({ mime_type: reveal.mime_type, count: 1 })}
    ON CONFLICT (mime_type) DO UPDATE SET
      count = counts_by_mime_type.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_sat_rarity ${sql({ rarity: reveal.rarity, count: 1 })}
    ON CONFLICT (rarity) DO UPDATE SET
      count = counts_by_sat_rarity.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_type ${sql({
      type: parseInt(reveal.classic_number, 10) >= 0 ? 'blessed' : 'cursed',
      count: 1,
    })}
    ON CONFLICT (type) DO UPDATE SET
      count = counts_by_type.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_address ${sql({ address: reveal.address, count: 1 })}
    ON CONFLICT (address) DO UPDATE SET
      count = counts_by_address.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_genesis_address ${sql({ address: reveal.address, count: 1 })}
    ON CONFLICT (address) DO UPDATE SET
      count = counts_by_genesis_address.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_recursive ${sql({ recursive: reveal.recursive, count: 1 })}
    ON CONFLICT (recursive) DO UPDATE SET
      count = counts_by_recursive.count + EXCLUDED.count
  `;
}
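A minimal usage sketch for the reveal helper above (not part of the commit): the helper names come straight from this file, but the import paths, the source of PgSqlClient, and every field value and type are assumptions for illustration only.

// Sketch: seed one inscription reveal plus the matching chain tip.
import { PgSqlClient } from '@hirosystems/api-toolkit'; // assumed source of the sql client type
import { inscriptionReveal, updateTestChainTip, randomHash } from './helpers'; // assumed path

async function seedOneReveal(sql: PgSqlClient): Promise<void> {
  const txId = randomHash();
  await inscriptionReveal(sql, {
    inscription_id: `${txId}i0`, // conventional `<tx_id>i<input_index>` format
    ordinal_number: '257418248345364',
    number: '0',
    classic_number: '0', // non-negative, so counted as 'blessed'
    rarity: 'common',
    coinbase_height: '51483',
    block_height: '775617',
    block_hash: randomHash(),
    tx_id: txId,
    tx_index: 0,
    address: 'bc1ptestaddress',
    mime_type: 'text/plain',
    content_type: 'text/plain;charset=utf-8',
    content_length: 5,
    content: '0x48656c6c6f',
    fee: '705',
    curse_type: null,
    recursive: false,
    input_index: 0,
    pointer: null,
    metadata: null,
    metaprotocol: null,
    parent: null,
    delegate: null,
    output: `${txId}:0`,
    offset: '0',
    prev_output: null,
    prev_offset: null,
    value: '10000',
    transfer_type: 'transferred',
    timestamp: 1676913207,
  });
  await updateTestChainTip(sql, 775617);
}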
type TestOrdinalsInscriptionTransfer = TestOrdinalsLocationsRow &
  TestOrdinalsCurrentLocationsRow &
  TestOrdinalsInscriptionTransfersRow;
export async function inscriptionTransfer(
  sql: PgSqlClient,
  transfer: TestOrdinalsInscriptionTransfer
) {
  await insertTestLocation(sql, {
    ordinal_number: transfer.ordinal_number,
    block_height: transfer.block_height,
    tx_index: transfer.tx_index,
    tx_id: transfer.tx_id,
    block_hash: transfer.block_hash,
    address: transfer.address,
    output: transfer.output,
    offset: transfer.offset,
    prev_output: transfer.prev_output,
    prev_offset: transfer.prev_offset,
    value: transfer.value,
    transfer_type: transfer.transfer_type,
    timestamp: transfer.timestamp,
  });
  await insertTestCurrentLocation(sql, {
    ordinal_number: transfer.ordinal_number,
    block_height: transfer.block_height,
    tx_index: transfer.tx_index,
    tx_id: transfer.tx_id,
    address: transfer.address,
    output: transfer.output,
    offset: transfer.offset,
  });
  await insertTestInscriptionTransfer(sql, {
    inscription_id: transfer.inscription_id,
    number: transfer.number,
    ordinal_number: transfer.ordinal_number,
    block_height: transfer.block_height,
    tx_index: transfer.tx_index,
    from_block_height: transfer.from_block_height,
    from_tx_index: transfer.from_tx_index,
    block_transfer_index: transfer.block_transfer_index,
  });
}
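A companion sketch (again illustrative, not from the commit) that moves a previously revealed inscription one block later; from_block_height and from_tx_index point back at the genesis location row:

// Sketch: record a transfer of an inscription seeded earlier (values assumed).
async function seedTransfer(
  sql: PgSqlClient,
  args: { inscriptionId: string; ordinalNumber: string; prevOutput: string }
): Promise<void> {
  const transferTxId = randomHash();
  await inscriptionTransfer(sql, {
    inscription_id: args.inscriptionId,
    number: '0',
    ordinal_number: args.ordinalNumber,
    block_height: '775618',
    block_hash: randomHash(),
    tx_id: transferTxId,
    tx_index: 0,
    address: 'bc1pnewowner',
    output: `${transferTxId}:0`,
    offset: '0',
    prev_output: args.prevOutput, // the reveal's output, now spent
    prev_offset: '0',
    value: '9000',
    transfer_type: 'transferred',
    timestamp: 1676913400,
    from_block_height: '775617',
    from_tx_index: 0,
    block_transfer_index: 0, // first transfer recorded in this block
  });
}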
type TestBrc20OperationsRow = {
  ticker: string;
  operation: string;
  inscription_id: string;
  inscription_number: string;
  ordinal_number: string;
  block_height: string;
  block_hash: string;
  tx_id: string;
  tx_index: number;
  output: string;
  offset: string;
  timestamp: number;
  address: string;
  to_address: string | null;
  amount: string;
};
type TestBrc20TokensRow = {
  ticker: string;
  display_ticker: string;
  inscription_id: string;
  inscription_number: string;
  block_height: string;
  block_hash: string;
  tx_id: string;
  tx_index: number;
  address: string;
  max: string;
  limit: string;
  decimals: number;
  self_mint: boolean;
  minted_supply: string;
  tx_count: number;
  timestamp: number;
};
type TestBrc20BalancesRow = {
  ticker: string;
  address: string;
  avail_balance: string;
  trans_balance: string;
  total_balance: string;
};

type TestBrc20TokenDeploy = TestBrc20TokensRow & TestBrc20OperationsRow;
export async function brc20TokenDeploy(sql: PgSqlClient, deploy: TestBrc20TokenDeploy) {
  const token: TestBrc20TokensRow = {
    ticker: deploy.ticker,
    display_ticker: deploy.display_ticker,
    inscription_id: deploy.inscription_id,
    inscription_number: deploy.inscription_number,
    block_height: deploy.block_height,
    block_hash: deploy.block_hash,
    tx_id: deploy.tx_id,
    tx_index: deploy.tx_index,
    address: deploy.address,
    max: deploy.max,
    limit: deploy.limit,
    decimals: deploy.decimals,
    self_mint: deploy.self_mint,
    minted_supply: deploy.minted_supply,
    tx_count: deploy.tx_count,
    timestamp: deploy.timestamp,
  };
  await sql`INSERT INTO tokens ${sql(token)}`;
  const op: TestBrc20OperationsRow = {
    ticker: deploy.ticker,
    operation: 'deploy',
    inscription_id: deploy.inscription_id,
    inscription_number: deploy.inscription_number,
    ordinal_number: deploy.ordinal_number,
    block_height: deploy.block_height,
    block_hash: deploy.block_hash,
    tx_id: deploy.tx_id,
    tx_index: deploy.tx_index,
    output: deploy.output,
    offset: deploy.offset,
    timestamp: deploy.timestamp,
    address: deploy.address,
    to_address: deploy.to_address,
    amount: deploy.amount,
  };
  await sql`INSERT INTO operations ${sql(op)}`;
  await sql`
    INSERT INTO counts_by_operation ${sql({ operation: 'deploy', count: 1 })}
    ON CONFLICT (operation) DO UPDATE SET
      count = counts_by_operation.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_address_operation ${sql({
      address: deploy.address,
      operation: 'deploy',
      count: 1,
    })}
    ON CONFLICT (address, operation) DO UPDATE SET
      count = counts_by_address_operation.count + EXCLUDED.count
  `;
}
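The deploy helper bundles the token row, the genesis 'deploy' operation, and both operation counters. A sketch of seeding one token (all values illustrative, not from the commit):

// Sketch: seed a BRC-20 deploy so later mint/transfer operations have a token.
async function seedDeploy(sql: PgSqlClient): Promise<void> {
  const txId = randomHash();
  await brc20TokenDeploy(sql, {
    ticker: 'pepe',
    display_ticker: 'PEPE', // original casing preserved separately from the key
    operation: 'deploy',
    inscription_id: `${txId}i0`,
    inscription_number: '0',
    ordinal_number: '257418248345364',
    block_height: '775617',
    block_hash: randomHash(),
    tx_id: txId,
    tx_index: 0,
    output: `${txId}:0`,
    offset: '0',
    address: 'bc1ptestdeployer',
    to_address: null,
    amount: '0',
    max: '21000000',
    limit: '1000',
    decimals: 18,
    self_mint: false,
    minted_supply: '0',
    tx_count: 1,
    timestamp: 1676913207,
  });
}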
export async function brc20Operation(sql: PgSqlClient, operation: TestBrc20OperationsRow) {
  await sql`INSERT INTO operations ${sql(operation)}`;
  if (operation.operation !== 'transfer_receive') {
    await sql`UPDATE tokens SET tx_count = tx_count + 1 WHERE ticker = ${operation.ticker}`;
  }
  await sql`
    INSERT INTO counts_by_operation ${sql({ operation: operation.operation, count: 1 })}
    ON CONFLICT (operation) DO UPDATE SET
      count = counts_by_operation.count + EXCLUDED.count
  `;
  await sql`
    INSERT INTO counts_by_address_operation ${sql({
      address: operation.address,
      operation: operation.operation,
      count: 1,
    })}
    ON CONFLICT (address, operation) DO UPDATE SET
      count = counts_by_address_operation.count + EXCLUDED.count
  `;
  const balance: TestBrc20BalancesRow = {
    ticker: operation.ticker,
    address: operation.address,
    avail_balance: '0',
    trans_balance: '0',
    total_balance: '0',
  };
  switch (operation.operation) {
    case 'mint':
    case 'transfer_receive':
      balance.avail_balance = operation.amount;
      balance.total_balance = operation.amount;
      break;
    case 'transfer':
      balance.avail_balance = `-${operation.amount}`;
      balance.trans_balance = operation.amount;
      break;
    case 'transfer_send':
      balance.trans_balance = `-${operation.amount}`;
      balance.total_balance = `-${operation.amount}`;
      break;
    default:
      break;
  }
  await sql`
    INSERT INTO balances ${sql(balance)}
    ON CONFLICT (ticker, address) DO UPDATE SET
      avail_balance = balances.avail_balance + EXCLUDED.avail_balance,
      trans_balance = balances.trans_balance + EXCLUDED.trans_balance,
      total_balance = balances.total_balance + EXCLUDED.total_balance
  `;
}
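The switch above encodes BRC-20 balance accounting: a transfer only moves tokens from available to transferable, and the total changes on mint, transfer_send, and transfer_receive. A sketch (not from the commit) tracing the deltas for a full 100-token round trip:

// Sketch: the balance deltas the switch above produces for one round trip.
// Assumes both parties start at zero; plain numbers stand in for the
// string-typed numeric columns.
type Deltas = { avail: number; trans: number; total: number };

const apply = (b: Deltas, d: Deltas): Deltas => ({
  avail: b.avail + d.avail,
  trans: b.trans + d.trans,
  total: b.total + d.total,
});

let sender: Deltas = { avail: 0, trans: 0, total: 0 };
let receiver: Deltas = { avail: 0, trans: 0, total: 0 };

sender = apply(sender, { avail: 100, trans: 0, total: 100 }); // mint
sender = apply(sender, { avail: -100, trans: 100, total: 0 }); // transfer (inscribe)
sender = apply(sender, { avail: 0, trans: -100, total: -100 }); // transfer_send
receiver = apply(receiver, { avail: 100, trans: 0, total: 100 }); // transfer_receive

// sender ends at { avail: 0, trans: 0, total: 0 };
// receiver ends holding 100 available tokens.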
/** Generate a random hash-like string for testing */
export const randomHash = () =>
  [...Array(64)].map(() => Math.floor(Math.random() * 16).toString(16)).join('');

/** Generator for incrementing numbers */
export function* incrementing(
  start: number = 0,
  step: number = 1
): Generator<number, number, 'next'> {
  let current = start;

  while (true) {
    yield current;
    current += step;
  }
}
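A quick sketch (not part of the commit) of how these two utilities combine when generating fixtures:

// Sketch: unique, ordered identifiers for seeded test rows.
const numbers = incrementing(0); // yields 0, 1, 2, ...
const blockHeights = incrementing(767430); // consecutive block heights

const inscriptionNumber = numbers.next().value; // -> 0
const blockHeight = blockHeights.next().value; // -> 767430
const blockHash = randomHash(); // 64 hex chars, not a real block hash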
17 api/ordinals/tests/setup.ts Normal file
@@ -0,0 +1,17 @@
// ts-unused-exports:disable-next-line
export default (): void => {
  process.env.API_HOST = '0.0.0.0';
  process.env.API_PORT = '3000';
  process.env.ORDINALS_PGHOST = '127.0.0.1';
  process.env.ORDINALS_PGPORT = '5432';
  process.env.ORDINALS_PGUSER = 'postgres';
  process.env.ORDINALS_PGPASSWORD = 'postgres';
  process.env.ORDINALS_PGDATABASE = 'postgres';
  process.env.ORDINALS_SCHEMA = 'public';
  process.env.BRC20_PGHOST = '127.0.0.1';
  process.env.BRC20_PGPORT = '5432';
  process.env.BRC20_PGUSER = 'postgres';
  process.env.BRC20_PGPASSWORD = 'postgres';
  process.env.BRC20_PGDATABASE = 'postgres';
  process.env.BRC20_SCHEMA = 'public';
};
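A wiring sketch for this setup module, assuming the suite runs on Jest (whose globalSetup contract matches the default export above); the config file name and ts-jest preset are illustrative, not from the commit:

// jest.config.ts (sketch) — run the env defaults above once before any test.
import type { Config } from 'jest';

const config: Config = {
  preset: 'ts-jest', // assumed transform for the TS sources
  testEnvironment: 'node',
  globalSetup: './tests/setup.ts', // the module's default export runs once
};

export default config;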
6 api/ordinals/tsconfig.build.json Normal file
@@ -0,0 +1,6 @@
{
  "extends": "./tsconfig.json",
  "exclude": [
    "tests/**/*.ts",
  ]
}
113 api/ordinals/tsconfig.json Normal file
@@ -0,0 +1,113 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig to read more about this file */

    /* Projects */
    // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
    // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
    // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
    // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
    // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
    // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */

    /* Language and Environment */
    "target": "es2021" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
    "lib": [
      "es2021"
    ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
    // "jsx": "preserve", /* Specify what JSX code is generated. */
    // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
    // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
    // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
    // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
    // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
    // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
    // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
    // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
    // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */

    /* Modules */
    "module": "commonjs" /* Specify what module code is generated. */,
    // "rootDir": "./", /* Specify the root folder within your source files. */
    "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */,
    // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
    // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
    // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
    "typeRoots": [
      "./src/@types",
      "./node_modules/@types"
    ] /* Specify multiple folders that act like './node_modules/@types'. */,
    // "types": [], /* Specify type package names to be included without being referenced in a source file. */
    // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
    // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
    // "resolveJsonModule": true, /* Enable importing .json files. */
    // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */

    /* JavaScript Support */
    // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
    // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
    // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */

    /* Emit */
    // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
    // "declarationMap": true, /* Create sourcemaps for d.ts files. */
    // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
    "sourceMap": true /* Create source map files for emitted JavaScript files. */,
    // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
    "outDir": "./dist" /* Specify an output folder for all emitted files. */,
    // "removeComments": true, /* Disable emitting comments. */
    // "noEmit": true, /* Disable emitting files from a compilation. */
    // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
    // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
    // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
    // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
    // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
    // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
    // "newLine": "crlf", /* Set the newline character for emitting files. */
    // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
    // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
    // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
    // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
    // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
    // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */

    /* Interop Constraints */
    // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
    "allowSyntheticDefaultImports": false /* Allow 'import x from y' when a module doesn't have a default export. */,
    "esModuleInterop": false /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
    // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
    "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,

    /* Type Checking */
    "strict": true /* Enable all strict type-checking options. */,
    // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
    // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
    // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
    // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
    // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
    // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
    // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
    // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
    // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
    // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
    // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
    // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
    // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
    // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
    // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
    // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
    // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
    // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */

    /* Completeness */
    // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
    "skipLibCheck": true /* Skip type checking all .d.ts files. */
  },
  "include": [
    "./src/**/*.ts",
    "./tests/**/*.ts",
    "./util/**/*.ts"
  ],
}
33 api/ordinals/util/openapi-generator.ts Normal file
@@ -0,0 +1,33 @@
import Fastify, { FastifyPluginAsync } from 'fastify';
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { Api } from '../src/api/init';
import FastifySwagger from '@fastify/swagger';
import { existsSync, mkdirSync, writeFileSync } from 'fs';
import { Server } from 'http';
import { OpenApiSchemaOptions } from '../src/api/schemas';

/**
 * Generates `openapi.yaml` and `openapi.json` based on current Swagger definitions.
 */
export const ApiGenerator: FastifyPluginAsync<
  Record<never, never>,
  Server,
  TypeBoxTypeProvider
> = async (fastify, options) => {
  await fastify.register(FastifySwagger, OpenApiSchemaOptions);
  await fastify.register(Api, { prefix: '/ordinals/v1' });
  if (!existsSync('./tmp')) {
    mkdirSync('./tmp');
  }
  writeFileSync('./tmp/openapi.yaml', fastify.swagger({ yaml: true }));
  writeFileSync('./tmp/openapi.json', JSON.stringify(fastify.swagger(), null, 2));
};

const fastify = Fastify({
  trustProxy: true,
  logger: true,
}).withTypeProvider<TypeBoxTypeProvider>();

void fastify.register(ApiGenerator).then(async () => {
  await fastify.close();
});
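Because the registration and file writes happen as plugin side effects, the spec can also be produced programmatically. A sketch (import path assumed, not from the commit):

// Sketch: reuse ApiGenerator from another script to emit the OpenAPI spec.
import Fastify from 'fastify';
import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox';
import { ApiGenerator } from './openapi-generator'; // assumed relative path

async function generateSpec(): Promise<void> {
  const app = Fastify({ logger: false }).withTypeProvider<TypeBoxTypeProvider>();
  await app.register(ApiGenerator); // writes ./tmp/openapi.yaml and ./tmp/openapi.json
  await app.close();
}

void generateSpec();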
5 api/ordinals/vercel.json Normal file
@@ -0,0 +1,5 @@
{
  "git": {
    "deploymentEnabled": false
  }
}
@@ -19,7 +19,7 @@ use ordhook::db::blocks::{
    open_blocks_db_with_retry, open_readonly_blocks_db,
};
use ordhook::db::cursor::BlockBytesCursor;
use ordhook::db::migrate_dbs;
use ordhook::db::{migrate_dbs, reset_dbs};
use ordhook::service::Service;
use ordhook::try_info;
use std::collections::HashSet;
@@ -43,9 +43,29 @@ enum Command {
    /// Stream Bitcoin blocks and index ordinals inscriptions and transfers
    #[clap(subcommand)]
    Service(ServiceCommand),
    /// Perform maintenance operations on local databases
    /// Perform maintenance operations on local index
    #[clap(subcommand)]
    Db(OrdhookDbCommand),
    Index(IndexCommand),
    /// Database operations
    #[clap(subcommand)]
    Database(DatabaseCommand),
}

#[derive(Subcommand, PartialEq, Clone, Debug)]
enum DatabaseCommand {
    /// Migrates database
    #[clap(name = "migrate", bin_name = "migrate")]
    Migrate(DatabaseMigrateCommand),
    /// Resets database to an empty state
    #[clap(name = "reset", bin_name = "reset")]
    Reset(DatabaseMigrateCommand),
}

#[derive(Parser, PartialEq, Clone, Debug)]
struct DatabaseMigrateCommand {
    /// Load config file path
    #[clap(long = "config-path")]
    pub config_path: Option<String>,
}

#[derive(Subcommand, PartialEq, Clone, Debug)]
@@ -184,7 +204,7 @@ struct StartCommand {
}

#[derive(Subcommand, PartialEq, Clone, Debug)]
enum OrdhookDbCommand {
enum IndexCommand {
    /// Initialize a new ordhook db
    #[clap(name = "new", bin_name = "new")]
    New(SyncOrdhookDbCommand),
@@ -324,18 +344,18 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
                println!("Created file Ordhook.toml");
            }
        },
        Command::Db(OrdhookDbCommand::New(cmd)) => {
        Command::Index(IndexCommand::New(cmd)) => {
            let config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;
            migrate_dbs(&config, ctx).await?;
            open_blocks_db_with_retry(true, &config, ctx);
        }
        Command::Db(OrdhookDbCommand::Sync(cmd)) => {
        Command::Index(IndexCommand::Sync(cmd)) => {
            let config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;
            migrate_dbs(&config, ctx).await?;
            let service = Service::new(&config, ctx);
            service.catch_up_to_bitcoin_chain_tip().await?;
        }
        Command::Db(OrdhookDbCommand::Repair(subcmd)) => match subcmd {
        Command::Index(IndexCommand::Repair(subcmd)) => match subcmd {
            RepairCommand::Blocks(cmd) => {
                let mut config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;
                if let Some(network_threads) = cmd.network_threads {
@@ -369,7 +389,7 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
                }
            }
        },
        Command::Db(OrdhookDbCommand::Check(cmd)) => {
        Command::Index(IndexCommand::Check(cmd)) => {
            let config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;
            {
                let blocks_db = open_readonly_blocks_db(&config, ctx)?;
@@ -379,7 +399,7 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
                println!("{:?}", missing_blocks);
            }
        }
        Command::Db(OrdhookDbCommand::Drop(cmd)) => {
        Command::Index(IndexCommand::Drop(cmd)) => {
            let config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;

            let service = Service::new(&config, ctx);
@@ -401,6 +421,22 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
            service.rollback(&block_heights).await?;
            println!("{} blocks dropped", cmd.blocks);
        }
        Command::Database(DatabaseCommand::Migrate(cmd)) => {
            let config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;
            migrate_dbs(&config, ctx).await?;
        }
        Command::Database(DatabaseCommand::Reset(cmd)) => {
            let config = ConfigFile::default(false, false, false, &cmd.config_path, &None)?;
            println!(
                "WARNING: This operation will delete ALL index data and cannot be undone. Confirm? [Y/n]"
            );
            let mut buffer = String::new();
            std::io::stdin().read_line(&mut buffer).unwrap();
            if buffer.to_lowercase().starts_with('n') {
                return Err("Aborted".to_string());
            }
            reset_dbs(&config, ctx).await?;
        }
    }
    Ok(())
}
@@ -593,7 +593,7 @@ mod test {
            VerifiedBrc20BalanceData, VerifiedBrc20TokenDeployData, VerifiedBrc20TransferData,
        },
    },
    db::{pg_test_clear_db, pg_test_connection, pg_test_connection_pool},
    db::{pg_reset_db, pg_test_connection, pg_test_connection_pool},
};

async fn get_counts_by_operation<T: GenericClient>(client: &T) -> (i32, i32, i32, i32) {
@@ -1031,7 +1031,7 @@ mod test {
                );
            }
        }
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        Ok(())
    }
}
@@ -504,7 +504,7 @@ mod test {
            VerifiedBrc20TokenDeployData,
        },
    },
    db::{pg_test_clear_db, pg_test_connection, pg_test_connection_pool},
    db::{pg_reset_db, pg_test_connection, pg_test_connection_pool},
};

use super::Brc20MemoryCache;
@@ -658,7 +658,7 @@ mod test {
                )))
            );
        }
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        Ok(())
    }

@@ -760,7 +760,7 @@ mod test {
            )
            .await?)
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }
}
@@ -284,7 +284,7 @@ mod test {
        },
        test_builders::{TestBlockBuilder, TestTransactionBuilder},
    },
    db::{pg_test_clear_db, pg_test_connection, pg_test_connection_pool},
    db::{pg_reset_db, pg_test_connection, pg_test_connection_pool},
};

#[tokio::test]
@@ -469,7 +469,7 @@ mod test {

            result
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }
}
@@ -232,7 +232,11 @@ pub async fn verify_brc20_transfers(
    )>,
    String,
> {
    try_debug!(ctx, "BRC-20 verifying {} ordinal transfers", transfers.len());
    try_debug!(
        ctx,
        "BRC-20 verifying {} ordinal transfers",
        transfers.len()
    );

    // Select ordinal numbers to analyze for pending BRC20 transfers.
    let mut ordinal_numbers = vec![];
@@ -280,7 +284,12 @@ pub async fn verify_brc20_transfers(
                receiver_address: "".to_string(),
            },
        };
        results.push((transfer_row.inscription_id, verified, (*data).clone(), (*tx_identifier).clone()));
        results.push((
            transfer_row.inscription_id,
            verified,
            (*data).clone(),
            (*tx_identifier).clone(),
        ));
    }
    return Ok(results);
}
@@ -305,7 +314,7 @@ mod test {
            VerifiedBrc20BalanceData, VerifiedBrc20Operation, VerifiedBrc20TokenDeployData,
        },
    },
    db::{pg_test_clear_db, pg_test_connection, pg_test_connection_pool},
    db::{pg_reset_db, pg_test_connection, pg_test_connection_pool},
};

use super::{verify_brc20_operation, verify_brc20_transfers, VerifiedBrc20TransferData};
@@ -447,7 +456,7 @@ mod test {
            )
            .await
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }

@@ -560,7 +569,7 @@ mod test {
            )
            .await
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }

@@ -647,7 +656,7 @@ mod test {
            )
            .await
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }

@@ -724,7 +733,7 @@ mod test {
            )
            .await
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }

@@ -804,7 +813,7 @@ mod test {
            )
            .await
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }

@@ -968,7 +977,7 @@ mod test {
            )
            .await
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        result
    }

@@ -1095,7 +1104,7 @@ mod test {
            ).await?;
            verify_brc20_transfers(&vec![(&tx, &transfer)], &mut cache, &client, &ctx).await?
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        let Some(result) = result.first() else {
            return Ok(None);
        };
@@ -1207,7 +1216,7 @@ mod test {
            .await?;
            verify_brc20_transfers(&vec![(&tx, &transfer)], &mut cache, &client, &ctx).await?
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        let Some(result) = result.first() else {
            return Ok(None);
        };
@@ -151,7 +151,7 @@ mod test {
    core::test_builders::{TestBlockBuilder, TestTransactionBuilder},
    db::{
        ordinals_pg::{self, insert_block},
        pg_test_clear_db, pg_test_connection, pg_test_connection_pool,
        pg_reset_db, pg_test_connection, pg_test_connection_pool,
    },
};

@@ -200,7 +200,7 @@ mod test {

            (next.classic, next.jubilee)
        };
        pg_test_clear_db(&mut pg_client).await;
        pg_reset_db(&mut pg_client).await?;
        Ok(result)
    }
}
@@ -7,7 +7,7 @@ use chainhook_postgres::pg_connect_with_retry;

use chainhook_sdk::utils::Context;

use crate::{config::Config, core::meta_protocols::brc20::brc20_pg, try_info};
use crate::{config::Config, core::meta_protocols::brc20::brc20_pg, try_info, try_warn};

pub async fn migrate_dbs(config: &Config, ctx: &Context) -> Result<(), String> {
    {
@@ -23,6 +23,46 @@ pub async fn migrate_dbs(config: &Config, ctx: &Context) -> Result<(), String> {
    Ok(())
}

pub async fn reset_dbs(config: &Config, ctx: &Context) -> Result<(), String> {
    {
        try_warn!(ctx, "Resetting ordinals DB");
        let mut pg_client = pg_connect_with_retry(&config.ordinals_db).await;
        pg_reset_db(&mut pg_client).await?;
    }
    if let (Some(brc20_db), true) = (&config.brc20_db, config.meta_protocols.brc20) {
        try_warn!(ctx, "Resetting brc20 DB");
        let mut pg_client = pg_connect_with_retry(&brc20_db).await;
        pg_reset_db(&mut pg_client).await?;
    }
    Ok(())
}

pub async fn pg_reset_db(
    pg_client: &mut chainhook_postgres::tokio_postgres::Client,
) -> Result<(), String> {
    pg_client
        .batch_execute(
            "
            DO $$ DECLARE
                r RECORD;
            BEGIN
                FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
                    EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
                END LOOP;
            END $$;
            DO $$ DECLARE
                r RECORD;
            BEGIN
                FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP
                    EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
                END LOOP;
            END $$;",
        )
        .await
        .map_err(|e| format!("unable to reset db: {e}"))?;
    Ok(())
}

#[cfg(test)]
pub fn pg_test_config() -> chainhook_postgres::PgConnectionConfig {
    chainhook_postgres::PgConnectionConfig {
@@ -43,39 +83,9 @@ pub fn pg_test_connection_pool() -> chainhook_postgres::deadpool_postgres::Pool

#[cfg(test)]
pub async fn pg_test_connection() -> chainhook_postgres::tokio_postgres::Client {
    chainhook_postgres::pg_connect(&pg_test_config()).await.unwrap()
}

#[cfg(test)]
pub async fn pg_test_clear_db(pg_client: &mut chainhook_postgres::tokio_postgres::Client) {
    match pg_client
        .batch_execute(
            "
            DO $$ DECLARE
                r RECORD;
            BEGIN
                FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
                    EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
                END LOOP;
            END $$;
            DO $$ DECLARE
                r RECORD;
            BEGIN
                FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP
                    EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
                END LOOP;
            END $$;",
        )
        .await {
        Ok(rows) => rows,
        Err(e) => {
            println!(
                "error rolling back test migrations: {}",
                e.to_string()
            );
            std::process::exit(1);
        }
    };
    chainhook_postgres::pg_connect(&pg_test_config())
        .await
        .unwrap()
}

/// Drops DB files in a test environment.

@@ -990,7 +990,7 @@ mod test {
            self, get_chain_tip_block_height, get_inscriptions_at_block, insert_block,
            rollback_block,
        },
        pg_test_clear_db, pg_test_connection, pg_test_connection_pool,
        pg_reset_db, pg_test_connection, pg_test_connection_pool,
    },
};

@@ -1401,7 +1401,7 @@ mod test {
            assert_eq!(Some(799999), get_chain_tip_block_height(&client).await?);
        }
    }
    pg_test_clear_db(&mut pg_client).await;
    pg_reset_db(&mut pg_client).await?;
    Ok(())
    }
}