mirror of https://github.com/alexgo-io/bitcoin-indexer.git
synced 2026-01-12 22:43:06 +08:00
feat: ordhook-sdk-js refactoring (#186)
.github/workflows/ci.yaml (vendored): 182 changes
@@ -4,36 +4,58 @@ on:
  push:
    branches:
      - develop
      - main
    tags-ignore:
      - "**"
      - feat/ordhook-sdk-js
    paths-ignore:
      - "**/CHANGELOG.md"
      - '**/CHANGELOG.md'
  pull_request:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}
  cancel-in-progress: true

jobs:
  build-publish:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }}
          fetch-depth: 0
          persist-credentials: false

      - name: Cache cargo
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}

      - name: Cargo test
        run: |
          rustup update
          cargo test --all
          RUST_BACKTRACE=1 cargo test --all -- --test-threads=1

  build-publish:
    runs-on: ubuntu-latest
    needs: test
    outputs:
      docker_image_digest: ${{ steps.docker_push.outputs.digest }}
      new_release_published: ${{ steps.semantic.outputs.new_release_published }}
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false

      - name: Semantic Release
        uses: cycjimmy/semantic-release-action@v3
        uses: cycjimmy/semantic-release-action@v4
        id: semantic
        # Only run on non-PR events, or on PRs that aren't from forks
        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SEMANTIC_RELEASE_PACKAGE: ${{ github.event.repository.name }}
        with:
          semantic_version: 19
@@ -42,15 +64,21 @@ jobs:
            @semantic-release/git@10.0.1
            conventional-changelog-conventionalcommits@6.1.0

      - name: Checkout tag
        if: steps.semantic.outputs.new_release_version != ''
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          ref: v${{ steps.semantic.outputs.new_release_version }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
        uses: docker/setup-buildx-action@v3

      - name: Docker Meta
        id: meta
        uses: docker/metadata-action@v4
        uses: docker/metadata-action@v5
        with:
          images: |
            blockstack/${{ github.event.repository.name }}
            hirosystems/${{ github.event.repository.name }}
          tags: |
            type=ref,event=branch
@@ -59,18 +87,134 @@ jobs:
            type=semver,pattern={{major}}.{{minor}},value=${{ steps.semantic.outputs.new_release_version }},enable=${{ steps.semantic.outputs.new_release_version != '' }}
            type=raw,value=latest,enable={{is_default_branch}}

      - name: Login to DockerHub
        uses: docker/login-action@v2
      - name: Log in to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}

      - name: Build/Tag/Push Image
        uses: docker/build-push-action@v2
      - name: Build/Push Image
        uses: docker/build-push-action@v5
        id: docker_push
        with:
          context: .
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          file: ./dockerfiles/components/ordhook.dockerfile
          cache-from: type=gha
          cache-to: type=gha,mode=max
          # Only push if there's a new release on the main branch or we're building a non-main branch, and the event is not a PR from a fork
          push: ${{ (github.ref != 'refs/heads/master' || steps.semantic.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}
          push: ${{ (github.ref != 'refs/heads/main' || steps.semantic.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }}

  deploy-dev:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        k8s-env: [mainnet]
    needs: build-publish
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
    env:
      DEPLOY_ENV: dev
    environment:
      name: Development-${{ matrix.k8s-env }}
      url: https://platform.dev.hiro.so/
    steps:
      - name: Checkout actions repo
        uses: actions/checkout@v4
        with:
          ref: main
          token: ${{ secrets.GH_TOKEN }}
          repository: ${{ secrets.DEVOPS_ACTIONS_REPO }}

      - name: Deploy Ordhook build to Dev ${{ matrix.k8s-env }}
        uses: ./actions/deploy
        with:
          docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }}
          file_pattern: manifests/bitcoin/${{ matrix.k8s-env }}/ordhook/${{ env.DEPLOY_ENV }}/base/kustomization.yaml
          gh_token: ${{ secrets.GH_TOKEN }}

  auto-approve-dev:
    runs-on: ubuntu-latest
    if: needs.build-publish.outputs.new_release_published == 'true' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository)
    needs: build-publish
    steps:
      - name: Approve pending deployments
        run: |
          sleep 5
          ENV_IDS=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '[.[].environment.id // empty]')
          if [[ "${ENV_IDS}" != "[]" ]]; then
            curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":${ENV_IDS},\"state\":\"approved\",\"comment\":\"auto approve\"}"
          fi

  deploy-staging:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        k8s-env: [mainnet]
    needs:
      - build-publish
      - deploy-dev
    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
    env:
      DEPLOY_ENV: stg
    environment:
      name: Staging-${{ matrix.k8s-env }}
      url: https://platform.stg.hiro.so/
    steps:
      - name: Checkout actions repo
        uses: actions/checkout@v4
        with:
          ref: main
          token: ${{ secrets.GH_TOKEN }}
          repository: ${{ secrets.DEVOPS_ACTIONS_REPO }}

      - name: Deploy Ordhook build to Stg ${{ matrix.k8s-env }}
        uses: ./actions/deploy
        with:
          docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }}
          file_pattern: manifests/bitcoin/${{ matrix.k8s-env }}/ordhook/${{ env.DEPLOY_ENV }}/base/kustomization.yaml
          gh_token: ${{ secrets.GH_TOKEN }}

  auto-approve-stg:
    runs-on: ubuntu-latest
    if: needs.build-publish.outputs.new_release_published == 'true' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository)
    needs:
      - build-publish
      - deploy-dev
    steps:
      - name: Approve pending deployments
        run: |
          sleep 5
          ENV_IDS=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '[.[].environment.id // empty]')
          if [[ "${ENV_IDS}" != "[]" ]]; then
            curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":${ENV_IDS},\"state\":\"approved\",\"comment\":\"auto approve\"}"
          fi

  deploy-prod:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        k8s-env: [mainnet,testnet]
    needs:
      - build-publish
      - deploy-staging
    if: needs.build-publish.outputs.new_release_published == 'true' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository)
    env:
      DEPLOY_ENV: prd
    environment:
      name: Production-${{ matrix.k8s-env }}
      url: https://platform.hiro.so/
    steps:
      - name: Checkout actions repo
        uses: actions/checkout@v4
        with:
          ref: main
          token: ${{ secrets.GH_TOKEN }}
          repository: ${{ secrets.DEVOPS_ACTIONS_REPO }}

      - name: Deploy Ordhook build to Prd ${{ matrix.k8s-env }}
        uses: ./actions/deploy
        with:
          docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }}
          file_pattern: manifests/bitcoin/${{ matrix.k8s-env }}/ordhook/${{ env.DEPLOY_ENV }}/base/kustomization.yaml
          gh_token: ${{ secrets.GH_TOKEN }}
.github/workflows/ordhook-sdk-js.yml (vendored, new file): 518 lines
@@ -0,0 +1,518 @@
name: ordhook-sdk-js
env:
  DEBUG: napi:*
  APP_NAME: ordhook-sdk-js
  COMPONENT_PATH: components/ordhook-sdk-js
  MACOSX_DEPLOYMENT_TARGET: '13.0'
permissions:
  contents: write
  id-token: write
'on':
  push:
    branches:
      - feat/ordhook-sdk-js
    tags-ignore:
      - '**'
    paths-ignore:
      - '**/*.md'
      - LICENSE
      - '**/*.gitignore'
      - .editorconfig
      - docs/**
  pull_request: null
jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        settings:
          - host: macos-latest
            target: x86_64-apple-darwin
            build: |
              yarn build
              strip -x *.node
          # - host: windows-latest
          #   build: yarn build
          #   target: x86_64-pc-windows-msvc
          # - host: windows-latest
          #   build: |
          #     rustup target add i686-pc-windows-msvc
          #     yarn build --target i686-pc-windows-msvc
          #   target: i686-pc-windows-msvc
          - host: ubuntu-latest
            target: x86_64-unknown-linux-gnu
            docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
            build: |-
              sudo apt-get install libssl-dev &&
              set -e &&
              yarn --cwd components/ordhook-sdk-js build --target x86_64-unknown-linux-gnu &&
              strip -x components/ordhook-sdk-js/*.node
          # - host: ubuntu-latest
          #   target: x86_64-unknown-linux-musl
          #   docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
          #   build: set -e && yarn --cwd components/ordhook-sdk-js build && strip components/ordhook-sdk-js/*.node
          - host: macos-latest
            target: aarch64-apple-darwin
            build: |
              rustup target add aarch64-apple-darwin
              yarn build --target aarch64-apple-darwin
              strip -x *.node
          # - host: ubuntu-latest
          #   target: aarch64-unknown-linux-gnu
          #   docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian-aarch64
          #   build: |-
          #     sudo apt-get install libssl-dev &&
          #     set -e &&
          #     rustup target add aarch64-unknown-linux-gnu &&
          #     yarn --cwd components/ordhook-sdk-js build --target aarch64-unknown-linux-gnu &&
          #     aarch64-unknown-linux-gnu-strip components/ordhook-sdk-js/*.node
          # - host: ubuntu-latest
          #   target: armv7-unknown-linux-gnueabihf
          #   setup: |
          #     sudo apt-get update
          #     sudo apt-get install gcc-arm-linux-gnueabihf -y
          #   build: |
          #     rustup target add armv7-unknown-linux-gnueabihf
          #     yarn --cwd components/ordhook-sdk-js build --target armv7-unknown-linux-gnueabihf
          #     arm-linux-gnueabihf-strip components/ordhook-sdk-js/*.node
          # - host: ubuntu-latest
          #   target: aarch64-unknown-linux-musl
          #   docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-alpine
          #   build: |-
          #     set -e &&
          #     rustup target add aarch64-unknown-linux-musl &&
          #     yarn --cwd components/ordhook-sdk-js build --target aarch64-unknown-linux-musl &&
          #     /aarch64-linux-musl-cross/bin/aarch64-linux-musl-strip components/ordhook-sdk-js/*.node
          # - host: windows-latest
          #   target: aarch64-pc-windows-msvc
          #   build: |-
          #     rustup target add aarch64-pc-windows-msvc
          #     yarn build --target aarch64-pc-windows-msvc
    name: stable - ${{ matrix.settings.target }} - node@18
    runs-on: ${{ matrix.settings.host }}
    defaults:
      run:
        working-directory: ./components/ordhook-sdk-js
    steps:
      - uses: actions/checkout@v3
      - name: Setup node
        uses: actions/setup-node@v3
        if: ${{ !matrix.settings.docker }}
        with:
          node-version: 18
          check-latest: true
          cache: yarn
          cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
      - name: Install
        uses: dtolnay/rust-toolchain@stable
        if: ${{ !matrix.settings.docker }}
        with:
          toolchain: stable
          targets: ${{ matrix.settings.target }}
      - name: Cache cargo
        uses: actions/cache@v3
        with:
          path: |
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            .cargo-cache
            target/
          key: ${{ matrix.settings.target }}-cargo-${{ matrix.settings.host }}
      # - uses: goto-bus-stop/setup-zig@v2
      #   if: ${{ matrix.settings.target == 'armv7-unknown-linux-gnueabihf' }}
      #   with:
      #     version: 0.10.1
      - name: Setup toolchain
        run: ${{ matrix.settings.setup }}
        if: ${{ matrix.settings.setup }}
        shell: bash
      # - name: Setup node x86
      #   if: matrix.settings.target == 'i686-pc-windows-msvc'
      #   run: yarn config set supportedArchitectures.cpu "ia32"
      #   shell: bash
      - name: Install dependencies
        run: yarn install
      # - name: Setup node x86
      #   uses: actions/setup-node@v3
      #   if: matrix.settings.target == 'i686-pc-windows-msvc'
      #   with:
      #     node-version: 18
      #     check-latest: true
      #     cache: yarn
      #     cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
      #     architecture: x86
      - name: Build in docker
        uses: addnab/docker-run-action@v3
        if: ${{ matrix.settings.docker }}
        with:
          image: ${{ matrix.settings.docker }}
          options: '--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build'
          run: ${{ matrix.settings.build }}
      - name: Build
        run: ${{ matrix.settings.build }}
        if: ${{ !matrix.settings.docker }}
        shell: bash
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: bindings-${{ matrix.settings.target }}
          path: ${{ env.COMPONENT_PATH }}/${{ env.APP_NAME }}.*.node
          if-no-files-found: error
  # build-freebsd:
  #   runs-on: macos-12
  #   name: Build FreeBSD
  #   defaults:
  #     run:
  #       working-directory: ./components/ordhook-sdk-js
  #   steps:
  #     - uses: actions/checkout@v3
  #     - name: Build
  #       id: build
  #       uses: vmactions/freebsd-vm@v0
  #       env:
  #         DEBUG: napi:*
  #         RUSTUP_HOME: /usr/local/rustup
  #         CARGO_HOME: /usr/local/cargo
  #         RUSTUP_IO_THREADS: 1
  #       with:
  #         envs: DEBUG RUSTUP_HOME CARGO_HOME RUSTUP_IO_THREADS
  #         usesh: true
  #         mem: 3000
  #         prepare: |
  #           pkg install -y -f curl node libnghttp2 npm yarn
  #           curl https://sh.rustup.rs -sSf --output rustup.sh
  #           sh rustup.sh -y --profile minimal --default-toolchain beta
  #           export PATH="/usr/local/cargo/bin:$PATH"
  #           echo "~~~~ rustc --version ~~~~"
  #           rustc --version
  #           echo "~~~~ node -v ~~~~"
  #           node -v
  #           echo "~~~~ yarn --version ~~~~"
  #           yarn --version
  #         run: |
  #           export PATH="/usr/local/cargo/bin:$PATH"
  #           pwd
  #           ls -lah
  #           whoami
  #           env
  #           freebsd-version
  #           cd ./components/ordhook-sdk-js
  #           yarn install
  #           yarn build
  #           strip -x *.node
  #           yarn test
  #           rm -rf node_modules
  #           rm -rf target
  #           rm -rf .yarn/cache
  #     - name: Upload artifact
  #       uses: actions/upload-artifact@v3
  #       with:
  #         name: bindings-freebsd
  #         path: ${{ env.COMPONENT_PATH }}/${{ env.APP_NAME }}.*.node
  #         if-no-files-found: error
  test-macOS-binding:
    name: Test bindings on ${{ matrix.settings.target }} - node@${{ matrix.node }}
    needs:
      - build
    strategy:
      fail-fast: false
      matrix:
        settings:
          - host: macos-latest
            target: x86_64-apple-darwin
          # - host: windows-latest
          #   target: x86_64-pc-windows-msvc
        node:
          - '14'
          - '16'
          - '18'
    runs-on: ${{ matrix.settings.host }}
    steps:
      - uses: actions/checkout@v3
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
          check-latest: true
          cache: yarn
          cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
      - name: Install dependencies
        run: yarn install
      - name: Download artifacts
        uses: actions/download-artifact@v3
        with:
          name: bindings-${{ matrix.settings.target }}
          path: .
      - name: List packages
        run: ls -R .
        shell: bash
  test-linux-x64-gnu-binding:
    name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }}
    needs:
      - build
    strategy:
      fail-fast: false
      matrix:
        node:
          - '14'
          - '16'
          - '18'
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./components/ordhook-sdk-js
    steps:
      - uses: actions/checkout@v3
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
          check-latest: true
          cache: yarn
          cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
      - name: Install dependencies
        run: yarn install
      - name: Download artifacts
        uses: actions/download-artifact@v3
        with:
          name: bindings-x86_64-unknown-linux-gnu
          path: .
      - name: List packages
        run: ls -R .
        shell: bash
      # - name: Test bindings
      #   run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-slim yarn test
  # test-linux-x64-musl-binding:
  #   name: Test bindings on x86_64-unknown-linux-musl - node@${{ matrix.node }}
  #   needs:
  #     - build
  #   strategy:
  #     fail-fast: false
  #     matrix:
  #       node:
  #         - '14'
  #         - '16'
  #         - '18'
  #   runs-on: ubuntu-latest
  #   steps:
  #     - uses: actions/checkout@v3
  #     - name: Setup node
  #       uses: actions/setup-node@v3
  #       with:
  #         node-version: ${{ matrix.node }}
  #         check-latest: true
  #         cache: yarn
  #         cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
  #     - name: Install dependencies
  #       run: |
  #         yarn config set supportedArchitectures.libc "musl"
  #         yarn install
  #     - name: Download artifacts
  #       uses: actions/download-artifact@v3
  #       with:
  #         name: bindings-x86_64-unknown-linux-musl
  #         path: .
  #     - name: List packages
  #       run: ls -R .
  #       shell: bash
  #     - name: Test bindings
  #       run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-alpine yarn test
  # test-linux-aarch64-gnu-binding:
  #   name: Test bindings on aarch64-unknown-linux-gnu - node@${{ matrix.node }}
  #   needs:
  #     - build
  #   strategy:
  #     fail-fast: false
  #     matrix:
  #       node:
  #         - '14'
  #         - '16'
  #         - '18'
  #   runs-on: ubuntu-latest
  #   steps:
  #     - uses: actions/checkout@v3
  #     - name: Download artifacts
  #       uses: actions/download-artifact@v3
  #       with:
  #         name: bindings-aarch64-unknown-linux-gnu
  #         path: .
  #     - name: List packages
  #       run: ls -R .
  #       shell: bash
  #     - name: Install dependencies
  #       run: |
  #         yarn config set supportedArchitectures.cpu "arm64"
  #         yarn config set supportedArchitectures.libc "glibc"
  #         yarn install
  #     - name: Set up QEMU
  #       uses: docker/setup-qemu-action@v2
  #       with:
  #         platforms: arm64
  #     - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
  #     - name: Setup and run tests
  #       uses: addnab/docker-run-action@v3
  #       with:
  #         image: node:${{ matrix.node }}-slim
  #         options: '--platform linux/arm64 -v ${{ github.workspace }}:/build -w /build'
  #         run: |
  #           set -e
  #           yarn test
  #           ls -la
  # test-linux-aarch64-musl-binding:
  #   name: Test bindings on aarch64-unknown-linux-musl - node@${{ matrix.node }}
  #   needs:
  #     - build
  #   runs-on: ubuntu-latest
  #   steps:
  #     - uses: actions/checkout@v3
  #     - name: Download artifacts
  #       uses: actions/download-artifact@v3
  #       with:
  #         name: bindings-aarch64-unknown-linux-musl
  #         path: .
  #     - name: List packages
  #       run: ls -R .
  #       shell: bash
  #     - name: Install dependencies
  #       run: |
  #         yarn config set supportedArchitectures.cpu "arm64"
  #         yarn config set supportedArchitectures.libc "musl"
  #         yarn install
  #     - name: Set up QEMU
  #       uses: docker/setup-qemu-action@v2
  #       with:
  #         platforms: arm64
  #     - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
  #     - name: Setup and run tests
  #       uses: addnab/docker-run-action@v3
  #       with:
  #         image: node:lts-alpine
  #         options: '--platform linux/arm64 -v ${{ github.workspace }}:/build -w /build'
  #         run: |
  #           set -e
  #           yarn test
  # test-linux-arm-gnueabihf-binding:
  #   name: Test bindings on armv7-unknown-linux-gnueabihf - node@${{ matrix.node }}
  #   needs:
  #     - build
  #   strategy:
  #     fail-fast: false
  #     matrix:
  #       node:
  #         - '14'
  #         - '16'
  #         - '18'
  #   runs-on: ubuntu-latest
  #   steps:
  #     - uses: actions/checkout@v3
  #     - name: Download artifacts
  #       uses: actions/download-artifact@v3
  #       with:
  #         name: bindings-armv7-unknown-linux-gnueabihf
  #         path: .
  #     - name: List packages
  #       run: ls -R .
  #       shell: bash
  #     - name: Install dependencies
  #       run: |
  #         yarn config set supportedArchitectures.cpu "arm"
  #         yarn install
  #     - name: Set up QEMU
  #       uses: docker/setup-qemu-action@v2
  #       with:
  #         platforms: arm
  #     - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
  #     - name: Setup and run tests
  #       uses: addnab/docker-run-action@v3
  #       with:
  #         image: node:${{ matrix.node }}-bullseye-slim
  #         options: '--platform linux/arm/v7 -v ${{ github.workspace }}:/build -w /build'
  #         run: |
  #           set -e
  #           yarn test
  #           ls -la
  universal-macOS:
    name: Build universal macOS binary
    needs:
      - build
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v3
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version: 18
          check-latest: true
          cache: yarn
          cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
      - name: Install dependencies
        run: yarn --cwd components/ordhook-sdk-js install
      - name: Download macOS x64 artifact
        uses: actions/download-artifact@v3
        with:
          name: bindings-x86_64-apple-darwin
          path: components/ordhook-sdk-js/artifacts
      - name: Download macOS arm64 artifact
        uses: actions/download-artifact@v3
        with:
          name: bindings-aarch64-apple-darwin
          path: components/ordhook-sdk-js/artifacts
      - name: Combine binaries
        run: yarn --cwd components/ordhook-sdk-js universal
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: bindings-universal-apple-darwin
          path: ${{ env.COMPONENT_PATH }}/${{ env.APP_NAME }}.*.node
          if-no-files-found: error
  publish:
    name: Publish
    runs-on: ubuntu-latest
    needs:
      # - build-freebsd
      - test-macOS-binding
      - test-linux-x64-gnu-binding
      # - test-linux-x64-musl-binding
      # - test-linux-aarch64-gnu-binding
      # - test-linux-aarch64-musl-binding
      # - test-linux-arm-gnueabihf-binding
      - universal-macOS
    steps:
      - uses: actions/checkout@v3
      - name: Setup node
        uses: actions/setup-node@v3
        with:
          node-version: 18
          check-latest: true
          cache: yarn
          cache-dependency-path: ./components/ordhook-sdk-js/yarn.lock
      - name: Install dependencies
        run: yarn --cwd components/ordhook-sdk-js install
      - name: Download all artifacts
        uses: actions/download-artifact@v3
        with:
          path: artifacts
      - name: Move artifacts
        run: yarn --cwd components/ordhook-sdk-js artifacts
      - name: List packages
        run: ls -R components/ordhook-sdk-js/./npm
        shell: bash
      - name: Publish
        run: |
          cd components/ordhook-sdk-js
          npm config set provenance true
          if git log -1 --pretty=%B | grep "^[0-9]\+\.[0-9]\+\.[0-9]\+$";
          then
            echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
            npm publish --access public
          elif git log -1 --pretty=%B | grep "^[0-9]\+\.[0-9]\+\.[0-9]\+";
          then
            echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
            npm publish --tag next --access public
          else
            echo "Not a release, skipping publish"
          fi
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
.gitignore (vendored): 198 changes
@@ -22,3 +22,201 @@ components/chainhook-types-js/dist
cache/
./tests
tmp/

# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node

### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# Next.js build output
.next

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# End of https://www.toptal.com/developers/gitignore/api/node

# Created by https://www.toptal.com/developers/gitignore/api/macos
# Edit at https://www.toptal.com/developers/gitignore?templates=macos

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon

# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### macOS Patch ###
# iCloud generated files
*.icloud

# End of https://www.toptal.com/developers/gitignore/api/macos

# Created by https://www.toptal.com/developers/gitignore/api/windows
# Edit at https://www.toptal.com/developers/gitignore?templates=windows

### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db

# Dump file
*.stackdump

# Folder config file
[Dd]esktop.ini

# Recycle Bin used on file shares
$RECYCLE.BIN/

# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp

# Windows shortcuts
*.lnk

# End of https://www.toptal.com/developers/gitignore/api/windows

#Added by cargo

/target
Cargo.lock

.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

*.node

Cargo.lock (generated): 683 changes
File diff suppressed because it is too large
@@ -14,7 +14,7 @@ num_cpus = "1.16.0"
serde = "1"
serde_json = "1"
serde_derive = "1"
reqwest = { version = "0.11", features = ["stream", "json"] }
reqwest = { version = "0.11", default-features = false, features = ["stream", "json", "rustls-tls"] }
hiro-system-kit = "0.3.1"
clap = { version = "3.2.23", features = ["derive"], optional = true }
clap_generate = { version = "3.0.3", optional = true }

@@ -108,6 +108,9 @@ struct ScanBlocksCommand {
    /// HTTP Post activity to a URL
    #[clap(long = "post-to")]
    pub post_to: Option<String>,
    /// HTTP Auth token
    #[clap(long = "auth-token")]
    pub auth_token: Option<String>,
}

#[derive(Parser, PartialEq, Clone, Debug)]
@@ -284,6 +287,9 @@ struct StartCommand {
    /// Block height where ordhook will start posting Ordinals activities
    #[clap(long = "start-at-block")]
    pub start_at_block: Option<u64>,
    /// HTTP Auth token
    #[clap(long = "auth-token")]
    pub auth_token: Option<String>,
}

#[derive(Subcommand, PartialEq, Clone, Debug)]
@@ -499,10 +505,16 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
                &post_to,
                cmd.start_block,
                Some(cmd.end_block),
                cmd.auth_token,
            )?
            .into_selected_network_specification(&config.network.bitcoin_network)?;
            scan_bitcoin_chainstate_via_rpc_using_predicate(&predicate_spec, &config, &ctx)
                .await?;
            scan_bitcoin_chainstate_via_rpc_using_predicate(
                &predicate_spec,
                &config,
                None,
                &ctx,
            )
            .await?;
        } else {
            let _ = download_ordinals_dataset_if_required(&config, ctx).await;
            let mut total_inscriptions = 0;
@@ -635,7 +647,13 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
            let mut predicates = vec![];

            for post_to in cmd.post_to.iter() {
                let predicate = build_predicate_from_cli(&config, post_to, start_block, None)?;
                let predicate = build_predicate_from_cli(
                    &config,
                    post_to,
                    start_block,
                    None,
                    cmd.auth_token.clone(),
                )?;
                predicates.push(ChainhookFullSpecification::Bitcoin(predicate));
            }

@@ -830,6 +848,7 @@ pub fn build_predicate_from_cli(
    post_to: &str,
    start_block: u64,
    end_block: Option<u64>,
    auth_token: Option<String>,
) -> Result<BitcoinChainhookFullSpecification, String> {
    let mut networks = BTreeMap::new();
    // Retrieve last block height known, and display it
@@ -847,7 +866,7 @@ pub fn build_predicate_from_cli(
            predicate: BitcoinPredicateType::OrdinalsProtocol(OrdinalOperations::InscriptionFeed),
            action: HookAction::HttpPost(HttpHook {
                url: post_to.to_string(),
                authorization_header: "".to_string(),
                authorization_header: format!("Bearer {}", auth_token.unwrap_or("".to_string())),
            }),
        },
    );

@@ -12,10 +12,10 @@ redis = "0.21.5"
serde-redis = "0.12.0"
hex = "0.4.3"
rand = "0.8.5"
chainhook-sdk = { version = "=0.9.5", default-features = false, features = ["zeromq", "log"] }
# chainhook-sdk = { version = "=0.9.0", path = "../../../chainhook/components/chainhook-sdk", default-features = false, features = ["zeromq", "log"] }
chainhook-sdk = { version = "=0.10.5", features = ["zeromq"] }
# chainhook-sdk = { version = "=0.10.1", path = "../../../chainhook/components/chainhook-sdk", default-features = false, features = ["zeromq", "log"] }
hiro-system-kit = "0.3.1"
reqwest = { version = "0.11", features = ["stream", "json"] }
reqwest = { version = "0.11", default-features = false, features = ["stream", "json", "rustls-tls"] }
tokio = { version = "=1.24", features = ["full"] }
futures-util = "0.3.24"
flate2 = "1.0.24"
@@ -33,7 +33,7 @@ fxhash = "0.2.1"
rusqlite = { version = "0.27.0", features = ["bundled"] }
anyhow = { version = "1.0.56", features = ["backtrace"] }
schemars = { version = "0.8.10", git = "https://github.com/hirosystems/schemars.git", branch = "feat-chainhook-fixes" }
pprof = { version = "0.12", features = ["flamegraph"] }
pprof = { version = "0.13.0", features = ["flamegraph"], optional = true }
progressing = '3'
futures = "0.3.28"

@@ -46,5 +46,5 @@ features = ["lz4", "snappy"]
# debug = true

[features]
debug = ["hiro-system-kit/debug"]
debug = ["hiro-system-kit/debug", "pprof"]
release = ["hiro-system-kit/release"]

@@ -94,16 +94,14 @@ pub fn compute_next_satpoint_data(
        SatPosition::Output((output_index, (offset_cross_inputs - offset_intra_outputs)))
    }

pub fn should_sync_rocks_db(
    config: &Config,
    ctx: &Context,
) -> Result<Option<(u64, u64)>, String> {
pub fn should_sync_rocks_db(config: &Config, ctx: &Context) -> Result<Option<(u64, u64)>, String> {
    let blocks_db = open_readwrite_ordhook_db_conn_rocks_db(&config.expected_cache_path(), &ctx)?;
    let inscriptions_db_conn = open_readonly_ordhook_db_conn(&config.expected_cache_path(), &ctx)?;
    let last_compressed_block = find_last_block_inserted(&blocks_db) as u64;
    let last_indexed_block = match find_latest_inscription_block_height(&inscriptions_db_conn, ctx)? {
    let last_indexed_block = match find_latest_inscription_block_height(&inscriptions_db_conn, ctx)?
    {
        Some(last_indexed_block) => last_indexed_block,
        None => 0
        None => 0,
    };

    let res = if last_compressed_block < last_indexed_block {
@@ -164,7 +162,6 @@ pub fn should_sync_ordhook_db(
        }
    };

    // TODO: Gracefully handle Regtest, Testnet and Signet
    let (mut end_block, speed) = if start_block < 200_000 {
        (end_block.min(200_000), 10_000)

@@ -1,18 +1,15 @@
use chainhook_sdk::{types::BitcoinBlockData, utils::Context};
use crossbeam_channel::{Sender, TryRecvError};
use rocksdb::DB;
use std::{
    thread::{sleep, JoinHandle},
    time::Duration,
};
use crossbeam_channel::{Sender, TryRecvError};
use chainhook_sdk::{types::BitcoinBlockData, utils::Context};
use rocksdb::DB;

use crate::{
    config::Config,
    core::pipeline::{PostProcessorCommand, PostProcessorController, PostProcessorEvent},
    db::{
        insert_entry_in_blocks,
        open_readwrite_ordhook_db_conn_rocks_db, LazyBlock,
    },
    db::{insert_entry_in_blocks, open_readwrite_ordhook_db_conn_rocks_db, LazyBlock},
};

pub fn start_block_archiving_processor(

@@ -32,7 +32,10 @@ use crate::{
        },
        OrdhookConfig,
    },
    db::{get_any_entry_in_ordinal_activities, open_readonly_ordhook_db_conn},
    db::{
        get_any_entry_in_ordinal_activities, open_ordhook_db_conn_rocks_db_loop,
        open_readonly_ordhook_db_conn,
    },
};

use crate::db::{LazyBlockTransaction, TraversalResult};
@@ -43,7 +46,7 @@ use crate::{
        new_traversals_lazy_cache,
        pipeline::{PostProcessorCommand, PostProcessorController, PostProcessorEvent},
    },
    db::{open_readwrite_ordhook_db_conn, open_readwrite_ordhook_db_conn_rocks_db},
    db::open_readwrite_ordhook_db_conn,
};

pub fn start_inscription_indexing_processor(
@@ -66,8 +69,7 @@ pub fn start_inscription_indexing_processor(
                open_readwrite_ordhook_db_conn(&config.expected_cache_path(), &ctx).unwrap();
            let ordhook_config = config.get_ordhook_config();
            let blocks_db_rw =
                open_readwrite_ordhook_db_conn_rocks_db(&config.expected_cache_path(), &ctx)
                    .unwrap();
                open_ordhook_db_conn_rocks_db_loop(true, &config.expected_cache_path(), &ctx);
            let mut empty_cycles = 0;

            let inscriptions_db_conn =

@@ -24,8 +24,7 @@ use crate::{
        find_blessed_inscription_with_ordinal_number,
        find_latest_cursed_inscription_number_at_block_height,
        find_latest_inscription_number_at_block_height, format_satpoint_to_watch,
        update_inscriptions_with_block, LazyBlockTransaction,
        TraversalResult,
        update_inscriptions_with_block, LazyBlockTransaction, TraversalResult,
    },
    ord::height::Height,
};
@@ -473,11 +472,7 @@ pub fn augment_block_with_ordinals_inscriptions_data_and_write_to_db_tx(
    );

    // Store inscriptions
    update_inscriptions_with_block(
        block,
        inscriptions_db_tx,
        ctx,
    );
    update_inscriptions_with_block(block, inscriptions_db_tx, ctx);

    any_events
}
@@ -523,7 +518,9 @@ pub fn augment_block_with_ordinals_inscriptions_data(

    // Handle sats overflow
    while let Some((tx_index, op_index)) = sats_overflows.pop_front() {
        let OrdinalOperation::InscriptionRevealed(ref mut inscription_data) = block.transactions[tx_index].metadata.ordinal_operations[op_index] else {
        let OrdinalOperation::InscriptionRevealed(ref mut inscription_data) =
            block.transactions[tx_index].metadata.ordinal_operations[op_index]
        else {
            continue;
        };
        let is_curse = inscription_data.curse_type.is_some();
@@ -711,7 +708,10 @@ fn consolidate_transaction_with_pre_computed_inscription_data(
            OrdinalOperation::InscriptionTransferred(_) => continue,
        };

        let Some(traversal) = inscriptions_data.remove(&(tx.transaction_identifier.clone(), inscription.inscription_input_index)) else {
        let Some(traversal) = inscriptions_data.remove(&(
            tx.transaction_identifier.clone(),
            inscription.inscription_input_index,
        )) else {
            continue;
        };

@@ -2,7 +2,7 @@ use chainhook_sdk::{
    bitcoincore_rpc_json::bitcoin::{hashes::hex::FromHex, Address, Network, Script},
    types::{
        BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
        OrdinalInscriptionTransferData, OrdinalOperation, TransactionIdentifier,
        OrdinalInscriptionTransferData, OrdinalOperation, TransactionIdentifier, OrdinalInscriptionTransferDestination,
    },
    utils::Context,
};
@@ -114,7 +114,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(
        let (
            outpoint_post_transfer,
            offset_post_transfer,
            updated_address,
            destination,
            post_transfer_output_value,
        ) = match post_transfer_data {
            SatPosition::Output((output_index, offset)) => {
@@ -124,7 +124,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(
                    tx.metadata.outputs[output_index].get_script_pubkey_hex();
                let updated_address = match Script::from_hex(&script_pub_key_hex) {
                    Ok(script) => match Address::from_script(&script, network.clone()) {
                        Ok(address) => Some(address.to_string()),
                        Ok(address) => OrdinalInscriptionTransferDestination::Transferred(address.to_string()),
                        Err(e) => {
                            ctx.try_log(|logger| {
                                warn!(
@@ -133,7 +133,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(
                                    e.to_string()
                                )
                            });
                            None
                            OrdinalInscriptionTransferDestination::Burnt(script.to_string())
                        }
                    },
                    Err(e) => {
@@ -144,7 +144,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(
                            e.to_string()
                        )
                        });
                        None
                        OrdinalInscriptionTransferDestination::Burnt(script_pub_key_hex.to_string())
                    }
                };

@@ -181,7 +181,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(
                        offset
                    )
                });
                (outpoint, total_offset, None, None)
                (outpoint, total_offset, OrdinalInscriptionTransferDestination::SpentInFees, None)
            }
        };

@@ -190,7 +190,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(

        let transfer_data = OrdinalInscriptionTransferData {
            inscription_id: watched_satpoint.inscription_id.clone(),
            updated_address,
            destination,
            tx_index,
            satpoint_pre_transfer,
            satpoint_post_transfer,

@@ -20,7 +20,10 @@ use chainhook_sdk::{
};

use crate::{
    core::protocol::inscription_parsing::{get_inscriptions_revealed_in_block, get_inscriptions_transferred_in_block}, ord::sat::Sat,
    core::protocol::inscription_parsing::{
        get_inscriptions_revealed_in_block, get_inscriptions_transferred_in_block,
    },
    ord::sat::Sat,
};

pub fn get_default_ordhook_db_file_path(base_dir: &PathBuf) -> PathBuf {
@@ -228,7 +231,7 @@ pub fn open_readonly_ordhook_db_conn_rocks_db(
    opts.set_disable_auto_compactions(true);
    opts.set_max_background_jobs(0);
    let db = DB::open_for_read_only(&opts, path, false)
        .map_err(|e| format!("unable to open hord.rocksdb: {}", e.to_string()))?;
        .map_err(|e| format!("unable to read hord.rocksdb: {}", e.to_string()))?;
    Ok(db)
}

@@ -276,7 +279,7 @@ pub fn open_readwrite_ordhook_db_conn_rocks_db(
    let path = get_default_ordhook_db_file_path_rocks_db(&base_dir);
    let opts = rocks_db_default_options();
    let db = DB::open(&opts, path)
        .map_err(|e| format!("unable to open hord.rocksdb: {}", e.to_string()))?;
        .map_err(|e| format!("unable to read-write hord.rocksdb: {}", e.to_string()))?;
    Ok(db)
}

@@ -494,12 +497,18 @@ pub fn insert_transfer_in_locations(
pub fn get_any_entry_in_ordinal_activities(
    block_height: &u64,
    inscriptions_db_tx: &Connection,
    _ctx: &Context,
    ctx: &Context,
) -> bool {
    let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
    let mut stmt = inscriptions_db_tx
    let mut stmt = match inscriptions_db_tx
        .prepare("SELECT DISTINCT block_height FROM inscriptions WHERE block_height = ?")
        .unwrap();
    {
        Ok(stmt) => stmt,
        Err(e) => {
            ctx.try_log(|logger| error!(logger, "{}", e.to_string()));
            panic!();
        }
    };
    let mut rows = stmt.query(args).unwrap();
    while let Ok(Some(_)) = rows.next() {
        return true;
@@ -824,15 +833,15 @@ pub fn find_all_inscriptions_in_block(
            { parse_inscription_id(&inscription_id) };
        let Some(transfer_data) = transfers_data
            .get(&inscription_id)
            .and_then(|entries| entries.first()) else {
                ctx.try_log(|logger| {
                    error!(
                        logger,
                        "unable to retrieve inscription genesis transfer data: {}",
                        inscription_id,
                    )
                });
                continue;
            .and_then(|entries| entries.first())
        else {
            ctx.try_log(|logger| {
                error!(
                    logger,
                    "unable to retrieve inscription genesis transfer data: {}", inscription_id,
                )
            });
            continue;
        };
        let traversal = TraversalResult {
            inscription_number,

@@ -6,9 +6,11 @@ use flate2::read::GzDecoder;
use futures_util::StreamExt;
use progressing::mapping::Bar as MappingBar;
use progressing::Baring;
use tar::Archive;
use std::fs::{self, File};
use std::io::{self, Cursor};
use std::io::{Read, Write};
use std::path::PathBuf;
use tar::Archive;

pub fn default_sqlite_file_path(_network: &BitcoinNetwork) -> String {
    format!("hord.sqlite").to_lowercase()
@@ -18,10 +20,12 @@ pub fn default_sqlite_sha_file_path(_network: &BitcoinNetwork) -> String {
    format!("hord.sqlite.sha256").to_lowercase()
}

pub async fn download_sqlite_file(config: &Config, _ctx: &Context) -> Result<(), String> {
pub async fn download_sqlite_file(config: &Config, ctx: &Context) -> Result<(), String> {
    let destination_path = config.expected_cache_path();
    std::fs::create_dir_all(&destination_path).unwrap_or_else(|e| {
        println!("{}", e.to_string());
        if ctx.logger.is_some() {
            println!("{}", e.to_string());
        }
    });

    // let remote_sha_url = config.expected_remote_ordinals_sqlite_sha256();
@@ -39,36 +43,66 @@ pub async fn download_sqlite_file(config: &Config, _ctx: &Context) -> Result<(),
    // write_file_content_at_path(&local_sha_file_path, &res.to_vec())?;

    let file_url = config.expected_remote_ordinals_sqlite_url();
    println!("=> {file_url}");
    if ctx.logger.is_some() {
        println!("=> {file_url}");
    }
    let res = reqwest::get(&file_url)
        .await
        .or(Err(format!("Failed to GET from '{}'", &file_url)))?;

    // Download chunks
    let (tx, rx) = flume::bounded(0);

    let decoder_thread = std::thread::spawn(move || {
        let input = ChannelRead::new(rx);
        let mut decoder = GzDecoder::new(input);
        let mut content = Vec::new();
        let _ = decoder.read_to_end(&mut content);
        let mut archive = Archive::new(&content[..]);
        if let Err(e) = archive.unpack(&destination_path) {
            println!("unable to write file: {}", e.to_string());
            std::process::exit(1);
        }
    });

    if res.status() == reqwest::StatusCode::OK {
        let limit = res.content_length().unwrap_or(10_000_000_000) as i64;
        let archive_tmp_file = PathBuf::from("db.tar");
        let decoder_thread = std::thread::spawn(move || {
            {
                let input = ChannelRead::new(rx);
                let mut decoder = GzDecoder::new(input);
                let mut tmp = File::create(&archive_tmp_file).unwrap();
                let mut buffer = [0; 512_000];
                loop {
                    match decoder.read(&mut buffer) {
                        Ok(0) => break,
                        Ok(n) => {
                            if let Err(e) = tmp.write_all(&buffer[..n]) {
                                let err = format!(
                                    "unable to update compressed archive: {}",
                                    e.to_string()
                                );
                                return Err(err);
                            }
                        }
                        Err(e) => {
                            let err =
                                format!("unable to write compressed archive: {}", e.to_string());
                            return Err(err);
                        }
                    }
                }
                let _ = tmp.flush();
            }
            let archive_file = File::open(&archive_tmp_file).unwrap();
            let mut archive = Archive::new(archive_file);
            if let Err(e) = archive.unpack(&destination_path) {
                let err = format!("unable to decompress file: {}", e.to_string());
                return Err(err);
            }
            let _ = fs::remove_file(archive_tmp_file);
            Ok(())
        });

        let mut progress_bar = MappingBar::with_range(0i64, limit);
        progress_bar.set_len(60);
        let mut stdout = std::io::stdout();
        print!("{}", progress_bar);
        let _ = stdout.flush();
        if ctx.logger.is_some() {
            print!("{}", progress_bar);
            let _ = stdout.flush();
        }
        let mut stream = res.bytes_stream();
        let mut progress = 0;
        let mut steps = 0;
        let mut tx_err = None;
        while let Some(item) = stream.next().await {
            let chunk = item.or(Err(format!("Error while downloading file")))?;
            progress += chunk.len() as i64;
@@ -78,24 +112,28 @@ pub async fn download_sqlite_file(config: &Config, _ctx: &Context) -> Result<(),
            }
            progress_bar.set(progress);
            if steps == 0 {
                print!("\r{}", progress_bar);
                let _ = stdout.flush();
                if ctx.logger.is_some() {
                    print!("\r{}", progress_bar);
                    let _ = stdout.flush();
                }
            }
            if let Err(e) = tx.send_async(chunk.to_vec()).await {
                let err = format!("unable to download archive: {}", e.to_string());
                tx_err = Some(err);
                break;
            }
            tx.send_async(chunk.to_vec())
                .await
                .map_err(|e| format!("unable to download stacks event: {}", e.to_string()))?;
        }
        progress_bar.set(limit);
        print!("\r{}", progress_bar);
        let _ = stdout.flush();
        println!();
        if ctx.logger.is_some() {
            print!("\r{}", progress_bar);
            let _ = stdout.flush();
            println!();
        }
        drop(tx);
    }

    tokio::task::spawn_blocking(|| decoder_thread.join())
        .await
        .unwrap()
        .unwrap();
        decoder_thread.join().unwrap()?;
        if let Some(_e) = tx_err.take() {}
    }

    Ok(())
}

@@ -26,10 +26,10 @@ use chainhook_sdk::types::{
use chainhook_sdk::utils::{file_append, send_request, BlockHeights, Context};
use std::collections::HashMap;

// TODO(lgalabru): Re-introduce support for blocks[] !!! gracefully handle hints for non consecutive blocks
pub async fn scan_bitcoin_chainstate_via_rpc_using_predicate(
    predicate_spec: &BitcoinChainhookSpecification,
    config: &Config,
    event_observer_config_override: Option<&EventObserverConfig>,
    ctx: &Context,
) -> Result<(), String> {
    let _ = download_ordinals_dataset_if_required(config, ctx).await;
@@ -85,7 +85,10 @@ scan_bitcoin_chainstate_via_rpc_using_predicate(
    let mut actions_triggered = 0;
    let mut err_count = 0;

    let event_observer_config = config.get_event_observer_config();
    let event_observer_config = match event_observer_config_override {
        Some(config_override) => config_override.clone(),
        None => config.get_event_observer_config(),
    };
    let bitcoin_config = event_observer_config.get_bitcoin_config();
    let number_of_blocks_to_scan = block_heights_to_scan.len() as u64;
    let mut number_of_blocks_scanned = 0;
@@ -95,15 +98,6 @@ scan_bitcoin_chainstate_via_rpc_using_predicate(
    while let Some(current_block_height) = block_heights_to_scan.pop_front() {
        number_of_blocks_scanned += 1;

        // Re-initiate connection every 250 blocks (pessimistic) to avoid stale connections
        let conn_updated = if number_of_blocks_scanned % 250 == 0 {
            inscriptions_db_conn =
                open_readonly_ordhook_db_conn(&config.expected_cache_path(), ctx)?;
            true
        } else {
            false
        };

        if !get_any_entry_in_ordinal_activities(&current_block_height, &inscriptions_db_conn, &ctx)
        {
            continue;
@@ -151,7 +145,7 @@ scan_bitcoin_chainstate_via_rpc_using_predicate(

        info!(
            ctx.expect_logger(),
            "Processing block #{current_block_height} through {} predicate ({} inscriptions revealed: [{}], db_conn updated: {conn_updated})",
            "Processing block #{current_block_height} through {} predicate ({} inscriptions revealed: [{}])",
            predicate_spec.uuid,
            inscriptions_revealed.len(),
            inscriptions_revealed.join(", ")

@@ -14,10 +14,9 @@ use crate::core::protocol::inscription_parsing::{
|
||||
use crate::core::protocol::inscription_sequencing::SequenceCursor;
|
||||
use crate::core::{new_traversals_lazy_cache, should_sync_ordhook_db, should_sync_rocks_db};
|
||||
use crate::db::{
|
||||
delete_data_in_ordhook_db, insert_entry_in_blocks,
|
||||
update_inscriptions_with_block, update_locations_with_block,
|
||||
open_readwrite_ordhook_db_conn, open_readwrite_ordhook_db_conn_rocks_db,
|
||||
open_readwrite_ordhook_dbs, LazyBlock, LazyBlockTransaction,
|
||||
delete_data_in_ordhook_db, insert_entry_in_blocks, open_readwrite_ordhook_db_conn,
|
||||
open_readwrite_ordhook_db_conn_rocks_db, open_readwrite_ordhook_dbs,
|
||||
update_inscriptions_with_block, update_locations_with_block, LazyBlock, LazyBlockTransaction,
|
||||
};
|
||||
use crate::scan::bitcoin::process_block_with_predicates;
|
||||
use crate::service::http_api::start_predicate_api_server;
|
||||
@@ -49,8 +48,8 @@ use std::sync::mpsc::channel;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct Service {
|
||||
config: Config,
|
||||
ctx: Context,
|
||||
pub config: Config,
|
||||
pub ctx: Context,
|
||||
}
|
||||
|
||||
impl Service {
|
||||
@@ -217,7 +216,7 @@ impl Service {
|
||||
>,
|
||||
) -> Result<(), String> {
|
||||
let PredicatesApi::On(ref api_config) = self.config.http_api else {
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let (bitcoin_scan_op_tx, bitcoin_scan_op_rx) = crossbeam_channel::unbounded();
|
||||
@@ -388,7 +387,7 @@ impl Service {
        );
        event_observer_config.chainhook_config = Some(chainhook_config);
        let data_rx = if enable_internal_trigger {
            let (tx, rx) = crossbeam_channel::unbounded();
            let (tx, rx) = crossbeam_channel::bounded(256);
            event_observer_config.data_handler_tx = Some(tx);
            Some(rx)
        } else {
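Swapping the internal trigger channel from `unbounded()` to `bounded(256)` adds backpressure: once 256 block payloads sit unconsumed, the producer blocks instead of letting the queue grow without limit. A small self-contained sketch of the difference (illustrative only, not ordhook code; requires the `crossbeam-channel` crate):

    use std::thread;

    fn main() {
        // With a bounded channel, `send` blocks once the buffer is full,
        // so a slow consumer throttles the producer instead of letting
        // the queue (and memory usage) grow without bound.
        let (tx, rx) = crossbeam_channel::bounded::<u64>(2);

        let producer = thread::spawn(move || {
            for height in 0..6 {
                tx.send(height).unwrap(); // blocks while 2 items are queued
            }
        });

        for height in rx.iter() {
            println!("processing block {height}");
        }
        producer.join().unwrap();
    }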
@@ -503,7 +502,7 @@ impl Service {
        while let Some((start_block, end_block, speed)) =
            should_sync_ordhook_db(&self.config, &self.ctx)?
        {
            if last_block_processed == end_block {
                break;
            }
            let blocks_post_processor = start_inscription_indexing_processor(
@@ -598,12 +597,14 @@ fn chainhook_sidecar_mutate_ordhook_db(command: HandleBlock, config: &Config, ct
    let compressed_block: LazyBlock = match LazyBlock::from_standardized_block(&block) {
        Ok(block) => block,
        Err(e) => {
            error!(
                ctx.expect_logger(),
                "Unable to compress block #{}: #{}",
                block.block_identifier.index,
                e.to_string()
            );
            ctx.try_log(|logger| {
                error!(
                    logger,
                    "Unable to compress block #{}: #{}",
                    block.block_identifier.index,
                    e.to_string()
                )
            });
            return;
        }
    };
@@ -616,17 +617,9 @@ fn chainhook_sidecar_mutate_ordhook_db(command: HandleBlock, config: &Config, ct
    );
    let _ = blocks_db_rw.flush();

    update_inscriptions_with_block(
        &block,
        &inscriptions_db_conn_rw,
        &ctx,
    );
    update_inscriptions_with_block(&block, &inscriptions_db_conn_rw, &ctx);

    update_locations_with_block(
        &block,
        &inscriptions_db_conn_rw,
        &ctx,
    );
    update_locations_with_block(&block, &inscriptions_db_conn_rw, &ctx);
        }
    }
}
@@ -709,12 +702,14 @@ pub fn chainhook_sidecar_mutate_blocks(
    let compressed_block: LazyBlock = match LazyBlock::from_standardized_block(&cache.block) {
        Ok(block) => block,
        Err(e) => {
            error!(
                ctx.expect_logger(),
                "Unable to compress block #{}: #{}",
                cache.block.block_identifier.index,
                e.to_string()
            );
            ctx.try_log(|logger| {
                error!(
                    logger,
                    "Unable to compress block #{}: #{}",
                    cache.block.block_identifier.index,
                    e.to_string()
                )
            });
            continue;
        }
    };
@@ -729,16 +724,8 @@ pub fn chainhook_sidecar_mutate_blocks(
    let _ = blocks_db_rw.flush();

    if cache.processed_by_sidecar {
        update_inscriptions_with_block(
            &cache.block,
            &inscriptions_db_tx,
            &ctx,
        );
        update_locations_with_block(
            &cache.block,
            &inscriptions_db_tx,
            &ctx,
        );
        update_inscriptions_with_block(&cache.block, &inscriptions_db_tx, &ctx);
        update_locations_with_block(&cache.block, &inscriptions_db_tx, &ctx);
    } else {
        updated_blocks_ids.push(format!("{}", cache.block.block_identifier.index));
@@ -32,6 +32,7 @@ pub fn start_bitcoin_scan_runloop(
    let op = scan_bitcoin_chainstate_via_rpc_using_predicate(
        &predicate_spec,
        &moved_config,
        None,
        &moved_ctx,
    );
3 components/ordhook-sdk-js/.cargo/config.toml Normal file
@@ -0,0 +1,3 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
rustflags = ["-C", "target-feature=-crt-static"]

16 components/ordhook-sdk-js/.npmignore Normal file
@@ -0,0 +1,16 @@
target
Cargo.toml
Cargo.lock
.cargo
.github
npm
src
.eslintrc
.prettierignore
rustfmt.toml
yarn.lock
**/*.rs
*.node
.yarn
__test__
renovate.json

BIN components/ordhook-sdk-js/.yarn/install-state.gz Normal file
Binary file not shown.

874 components/ordhook-sdk-js/.yarn/releases/yarn-3.6.4.cjs vendored Executable file
File diff suppressed because one or more lines are too long

3 components/ordhook-sdk-js/.yarnrc.yml Normal file
@@ -0,0 +1,3 @@
nodeLinker: node-modules

yarnPath: .yarn/releases/yarn-3.6.4.cjs

4755 components/ordhook-sdk-js/Cargo.lock generated
File diff suppressed because it is too large
@@ -1,24 +1,30 @@
[package]
name = "ordhook-sdk-js"
version = "0.5.0"
edition = "2021"
exclude = ["index.node"]
name = "ordhook-sdk-js"
version = "0.6.0"

[lib]
crate-type = ["cdylib"]

[dependencies]
serde = "1"
error-chain = "0.12"
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.2", default-features = false, features = ["napi4", "async", "tokio_rt", "serde-json"] }
napi-derive = "2.12.2"
crossbeam-channel = "0.5.6"
ordhook = { path = "../ordhook-core" }
hiro-system-kit = "0.3.1"
crossbeam-channel = "0.5.6"
serde_json = "1"
serde = "1"

[dependencies.neon]
version = "0.9.1"
default-features = false
features = ["napi-4", "channel-api", "event-queue-api", "try-catch-api"]
[build-dependencies]
napi-build = "2.0.1"

[dependencies.num]
version = "0.2"
default-features = false
[build]
target = "armv7-unknown-linux-gnueabihf"
rustflags = ["-C", "link-args=-L/lib/arm-linux-gnueabihf"]

[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-g++"

[profile.release]
lto = true
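The dependency swap from neon to napi-rs also changes how exports are declared: instead of manually registering functions from a neon `main`, anything annotated with `#[napi]` is exported automatically at build time. A minimal sketch of the pattern (hypothetical function, not part of this crate):

    use napi_derive::napi;

    // With napi-rs, the attribute macro generates the N-API registration
    // glue; no explicit module entry point is needed as under neon.
    #[napi]
    pub fn block_span(start_block: i64, end_block: i64) -> i64 {
      // napi-rs maps JS numbers to i64/f64 arguments directly.
      end_block - start_block + 1
    }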
@@ -1,4 +1,6 @@
import { OrdinalsIndexer } from "./index";
import test from 'ava'

import { OrdinalsIndexer } from "../index.js";

const indexer = new OrdinalsIndexer({
  bitcoinRpcUrl: 'http://0.0.0.0:8332',
@@ -8,23 +10,12 @@ const indexer = new OrdinalsIndexer({
  logs: false
});

indexer.applyBlock(block => {
indexer.onBlock(block => {
  console.log(`Hello from JS ${JSON.stringify(block)}`);
});

indexer.undoBlock(block => {
indexer.onBlockRollBack(block => {
  console.log(`Hello from JS ${JSON.stringify(block)}`);
});

// indexer.streamBlocks();

indexer.dropBlocks([32103, 32104]);

indexer.rewriteBlocks([32103, 32104]);

indexer.syncBlocks();

indexer.replayBlocks([32103, 32104]);

indexer.terminate();
indexer.replayBlocks([767430, 767431]);
5 components/ordhook-sdk-js/build.rs Normal file
@@ -0,0 +1,5 @@
extern crate napi_build;

fn main() {
  napi_build::setup();
}

21 components/ordhook-sdk-js/index.d.ts vendored Normal file
@@ -0,0 +1,21 @@
/* tslint:disable */
/* eslint-disable */

/* auto-generated by NAPI-RS */

export interface OrdinalsIndexerConfig {
  bitcoinRpcUrl?: string
  bitcoinRpcUsername?: string
  bitcoinRpcPassword?: string
  workingDir?: string
  logsEnabled?: boolean
}
export class OrdinalsIndexer {
  constructor(configOverrides?: OrdinalsIndexerConfig | undefined | null)
  onBlock(callback: (block: any) => boolean): void
  onBlockRollBack(callback: (block: any) => boolean): void
  streamBlocks(): void
  replayBlocks(blocks: Array<number>): void
  replayBlockRange(startBlock: number, endBlock: number): void
  terminate(): void
}
257 components/ordhook-sdk-js/index.js Normal file
@@ -0,0 +1,257 @@
/* tslint:disable */
/* eslint-disable */
/* prettier-ignore */

/* auto-generated by NAPI-RS */

const { existsSync, readFileSync } = require('fs')
const { join } = require('path')

const { platform, arch } = process

let nativeBinding = null
let localFileExisted = false
let loadError = null

function isMusl() {
  // For Node 10
  if (!process.report || typeof process.report.getReport !== 'function') {
    try {
      const lddPath = require('child_process').execSync('which ldd').toString().trim()
      return readFileSync(lddPath, 'utf8').includes('musl')
    } catch (e) {
      return true
    }
  } else {
    const { glibcVersionRuntime } = process.report.getReport().header
    return !glibcVersionRuntime
  }
}

switch (platform) {
  case 'android':
    switch (arch) {
      case 'arm64':
        localFileExisted = existsSync(join(__dirname, 'ordhook-sdk-js.android-arm64.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.android-arm64.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-android-arm64')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm':
        localFileExisted = existsSync(join(__dirname, 'ordhook-sdk-js.android-arm-eabi.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.android-arm-eabi.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-android-arm-eabi')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Android ${arch}`)
    }
    break
  case 'win32':
    switch (arch) {
      case 'x64':
        localFileExisted = existsSync(
          join(__dirname, 'ordhook-sdk-js.win32-x64-msvc.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.win32-x64-msvc.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-win32-x64-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'ia32':
        localFileExisted = existsSync(
          join(__dirname, 'ordhook-sdk-js.win32-ia32-msvc.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.win32-ia32-msvc.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-win32-ia32-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm64':
        localFileExisted = existsSync(
          join(__dirname, 'ordhook-sdk-js.win32-arm64-msvc.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.win32-arm64-msvc.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-win32-arm64-msvc')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Windows: ${arch}`)
    }
    break
  case 'darwin':
    localFileExisted = existsSync(join(__dirname, 'ordhook-sdk-js.darwin-universal.node'))
    try {
      if (localFileExisted) {
        nativeBinding = require('./ordhook-sdk-js.darwin-universal.node')
      } else {
        nativeBinding = require('@hirosystems/ordhook-sdk-js-darwin-universal')
      }
      break
    } catch {}
    switch (arch) {
      case 'x64':
        localFileExisted = existsSync(join(__dirname, 'ordhook-sdk-js.darwin-x64.node'))
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.darwin-x64.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-darwin-x64')
          }
        } catch (e) {
          loadError = e
        }
        break
      case 'arm64':
        localFileExisted = existsSync(
          join(__dirname, 'ordhook-sdk-js.darwin-arm64.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.darwin-arm64.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-darwin-arm64')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on macOS: ${arch}`)
    }
    break
  case 'freebsd':
    if (arch !== 'x64') {
      throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
    }
    localFileExisted = existsSync(join(__dirname, 'ordhook-sdk-js.freebsd-x64.node'))
    try {
      if (localFileExisted) {
        nativeBinding = require('./ordhook-sdk-js.freebsd-x64.node')
      } else {
        nativeBinding = require('@hirosystems/ordhook-sdk-js-freebsd-x64')
      }
    } catch (e) {
      loadError = e
    }
    break
  case 'linux':
    switch (arch) {
      case 'x64':
        if (isMusl()) {
          localFileExisted = existsSync(
            join(__dirname, 'ordhook-sdk-js.linux-x64-musl.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./ordhook-sdk-js.linux-x64-musl.node')
            } else {
              nativeBinding = require('@hirosystems/ordhook-sdk-js-linux-x64-musl')
            }
          } catch (e) {
            loadError = e
          }
        } else {
          localFileExisted = existsSync(
            join(__dirname, 'ordhook-sdk-js.linux-x64-gnu.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./ordhook-sdk-js.linux-x64-gnu.node')
            } else {
              nativeBinding = require('@hirosystems/ordhook-sdk-js-linux-x64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 'arm64':
        if (isMusl()) {
          localFileExisted = existsSync(
            join(__dirname, 'ordhook-sdk-js.linux-arm64-musl.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./ordhook-sdk-js.linux-arm64-musl.node')
            } else {
              nativeBinding = require('@hirosystems/ordhook-sdk-js-linux-arm64-musl')
            }
          } catch (e) {
            loadError = e
          }
        } else {
          localFileExisted = existsSync(
            join(__dirname, 'ordhook-sdk-js.linux-arm64-gnu.node')
          )
          try {
            if (localFileExisted) {
              nativeBinding = require('./ordhook-sdk-js.linux-arm64-gnu.node')
            } else {
              nativeBinding = require('@hirosystems/ordhook-sdk-js-linux-arm64-gnu')
            }
          } catch (e) {
            loadError = e
          }
        }
        break
      case 'arm':
        localFileExisted = existsSync(
          join(__dirname, 'ordhook-sdk-js.linux-arm-gnueabihf.node')
        )
        try {
          if (localFileExisted) {
            nativeBinding = require('./ordhook-sdk-js.linux-arm-gnueabihf.node')
          } else {
            nativeBinding = require('@hirosystems/ordhook-sdk-js-linux-arm-gnueabihf')
          }
        } catch (e) {
          loadError = e
        }
        break
      default:
        throw new Error(`Unsupported architecture on Linux: ${arch}`)
    }
    break
  default:
    throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}

if (!nativeBinding) {
  if (loadError) {
    throw loadError
  }
  throw new Error(`Failed to load native binding`)
}

const { OrdinalsIndexer } = nativeBinding

module.exports.OrdinalsIndexer = OrdinalsIndexer
@@ -1,108 +0,0 @@
"use strict";

const {
  ordinalsIndexerNew,
  ordinalsIndexerStreamBlocks,
  ordinalsIndexerReplayBlocks,
  ordinalsIndexerDropBlocks,
  ordinalsIndexerSyncBlocks,
  ordinalsIndexerRewriteBlocks,
  ordinalsIndexerOnBlockApply,
  ordinalsIndexerOnBlockUndo,
  ordinalsIndexerTerminate,
} = require("../native/index.node");

// import {
//   BitcoinChainUpdate,
//   Block,
//   StacksBlockMetadata,
//   StacksBlockUpdate,
//   StacksChainUpdate,
//   StacksTransaction,
//   StacksTransactionMetadata,
//   Transaction,
// } from "@hirosystems/chainhook-types";
// export * from "@hirosystems/chainhook-types";

export class OrdinalsIndexer {
  handle: any;

  /**
   * @summary Construct a new OrdinalsIndexer
   * @param
   * @memberof OrdinalsIndexer
   */
  constructor(settings: {
    bitcoinRpcUrl: string,
    bitcoinRpcUsername: string,
    bitcoinRpcPassword: string,
    workingDirectory: string,
    logs: boolean,
  }) {
    this.handle = ordinalsIndexerNew(settings);
  }

  /**
   * @summary Start streaming blocks
   * @memberof OrdinalsIndexer
   */
  streamBlocks() {
    return ordinalsIndexerStreamBlocks.call(this.handle);
  }

  /**
   * @summary Drop a set of blocks
   * @memberof OrdinalsIndexer
   */
  dropBlocks(blocks: number[]) {
    return ordinalsIndexerDropBlocks.call(this.handle, blocks);
  }

  /**
   * @summary Drop, download and re-index a set of blocks
   * @memberof OrdinalsIndexer
   */
  rewriteBlocks(blocks: number[]) {
    return ordinalsIndexerRewriteBlocks.call(this.handle, blocks);
  }

  /**
   * @summary Replay a set of blocks
   * @memberof OrdinalsIndexer
   */
  replayBlocks(blocks: number[]) {
    return ordinalsIndexerReplayBlocks.call(this.handle, blocks);
  }

  /**
   * @summary Download and index blocks
   * @memberof OrdinalsIndexer
   */
  syncBlocks() {
    return ordinalsIndexerSyncBlocks.call(this.handle);
  }

  /**
   * @summary Apply Block
   * @memberof OrdinalsIndexer
   */
  applyBlock(callback: (block: any) => void) {
    return ordinalsIndexerOnBlockApply.call(this.handle, callback);
  }

  /**
   * @summary Undo Block
   * @memberof OrdinalsIndexer
   */
  undoBlock(callback: (block: any) => void) {
    return ordinalsIndexerOnBlockUndo.call(this.handle, callback);
  }

  /**
   * @summary Terminates indexer
   * @memberof OrdinalsIndexer
   */
  terminate() {
    return ordinalsIndexerTerminate.call(this.handle);
  }
}
3 components/ordhook-sdk-js/npm/darwin-arm64/README.md Normal file
@@ -0,0 +1,3 @@
# `@hirosystems/ordhook-sdk-js-darwin-arm64`

This is the **aarch64-apple-darwin** binary for `@hirosystems/ordhook-sdk-js`

18 components/ordhook-sdk-js/npm/darwin-arm64/package.json Normal file
@@ -0,0 +1,18 @@
{
  "name": "@hirosystems/ordhook-sdk-js-darwin-arm64",
  "version": "0.6.2",
  "os": [
    "darwin"
  ],
  "cpu": [
    "arm64"
  ],
  "main": "ordhook-sdk-js.darwin-arm64.node",
  "files": [
    "ordhook-sdk-js.darwin-arm64.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  }
}

3 components/ordhook-sdk-js/npm/darwin-universal/README.md Normal file
@@ -0,0 +1,3 @@
# `@hirosystems/ordhook-sdk-js-darwin-universal`

This is the **universal-apple-darwin** binary for `@hirosystems/ordhook-sdk-js`

15 components/ordhook-sdk-js/npm/darwin-universal/package.json Normal file
@@ -0,0 +1,15 @@
{
  "name": "@hirosystems/ordhook-sdk-js-darwin-universal",
  "version": "0.6.2",
  "os": [
    "darwin"
  ],
  "main": "ordhook-sdk-js.darwin-universal.node",
  "files": [
    "ordhook-sdk-js.darwin-universal.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  }
}

3 components/ordhook-sdk-js/npm/darwin-x64/README.md Normal file
@@ -0,0 +1,3 @@
# `@hirosystems/ordhook-sdk-js-darwin-x64`

This is the **x86_64-apple-darwin** binary for `@hirosystems/ordhook-sdk-js`

18 components/ordhook-sdk-js/npm/darwin-x64/package.json Normal file
@@ -0,0 +1,18 @@
{
  "name": "@hirosystems/ordhook-sdk-js-darwin-x64",
  "version": "0.6.2",
  "os": [
    "darwin"
  ],
  "cpu": [
    "x64"
  ],
  "main": "ordhook-sdk-js.darwin-x64.node",
  "files": [
    "ordhook-sdk-js.darwin-x64.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  }
}

3 components/ordhook-sdk-js/npm/linux-x64-gnu/README.md Normal file
@@ -0,0 +1,3 @@
# `@hirosystems/ordhook-sdk-js-linux-x64-gnu`

This is the **x86_64-unknown-linux-gnu** binary for `@hirosystems/ordhook-sdk-js`

21 components/ordhook-sdk-js/npm/linux-x64-gnu/package.json Normal file
@@ -0,0 +1,21 @@
{
  "name": "@hirosystems/ordhook-sdk-js-linux-x64-gnu",
  "version": "0.6.2",
  "os": [
    "linux"
  ],
  "cpu": [
    "x64"
  ],
  "main": "ordhook-sdk-js.linux-x64-gnu.node",
  "files": [
    "ordhook-sdk-js.linux-x64-gnu.node"
  ],
  "license": "MIT",
  "engines": {
    "node": ">= 10"
  },
  "libc": [
    "glibc"
  ]
}

2552 components/ordhook-sdk-js/package-lock.json generated
File diff suppressed because it is too large
@@ -1,48 +1,42 @@
{
  "name": "@hirosystems/ordhook-sdk-js",
  "version": "1.7.1",
  "description": "ordhook-sdk-js is a library for writing protocols.",
  "author": "Ludo Galabru",
  "repository": "https://github.com/hirosystems/ordhook/tree/main/components/ordhook-sdk-js",
  "license": "GPL-3.0",
  "main": "dist/index.js",
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsc --build && cargo-cp-artifact -nc native/index.node -- cargo build --message-format=json-render-diagnostics",
    "build-debug": "npm run build --",
    "build-release": "npm run build -- --release",
    "build-linux-x64-glibc": "npm run build-release -- --target x86_64-unknown-linux-gnu",
    "build-linux-x64-musl": "npm run build-release -- --target x86_64-unknown-linux-musl",
    "build-windows-x64": "npm run build-release -- --target x86_64-pc-windows-msvc",
    "build-darwin-x64": "npm run build-release -- --target x86_64-apple-darwin",
    "build-darwin-arm64": "npm run build-release -- --target aarch64-apple-darwin",
    "install": "node-pre-gyp install --fallback-to-build=false || npm run build-release",
    "lint": "eslint .",
    "package": "node-pre-gyp package",
    "spec": "jest",
    "test": "npm run build && npm run spec",
    "upload-binary": "npm run build-release && node-pre-gyp package && node-pre-gyp-github publish",
    "version": "npm run build-release"
  "version": "0.6.3",
  "main": "index.js",
  "repository": {
    "type": "git",
    "url": "https://github.com/hirosystems/ordhook",
    "directory": "components/ordhook-sdk-js"
  },
  "dependencies": {
    "@hirosystems/chainhook-types": "^1.1.2",
    "@mapbox/node-pre-gyp": "^1.0.8",
    "neon-cli": "^0.9.1",
    "node-pre-gyp-github": "^1.4.3",
    "typescript": "^4.5.5"
  "types": "index.d.ts",
  "napi": {
    "name": "ordhook-sdk-js",
    "triples": {
      "additional": [
        "aarch64-apple-darwin",
        "aarch64-unknown-linux-gnu",
        "universal-apple-darwin"
      ]
    }
  },
  "license": "MIT",
  "devDependencies": {
    "@types/node": "^16.11.11",
    "cargo-cp-artifact": "^0.1"
    "@napi-rs/cli": "^2.16.3",
    "ava": "^5.1.1"
  },
  "binary": {
    "module_name": "index",
    "host": "https://github.com/hirosystems/clarinet/releases/download/",
    "remote_path": "v{version}",
    "package_name": "stacks-devnet-js-{platform}-{arch}-{libc}.tar.gz",
    "module_path": "./native",
    "pkg_path": "."
  }
  "ava": {
    "timeout": "45m"
  },
  "engines": {
    "node": ">= 10"
  },
  "scripts": {
    "artifacts": "napi artifacts",
    "build": "napi build --platform --release",
    "build:debug": "napi build --platform",
    "prepublishOnly": "napi prepublish -t npm",
    "test": "ava",
    "universal": "napi universal",
    "version": "napi version"
  },
  "packageManager": "yarn@3.6.4"
}
2 components/ordhook-sdk-js/rustfmt.toml Normal file
@@ -0,0 +1,2 @@
tab_spaces = 2
edition = "2021"
@@ -1,444 +1,6 @@
#![deny(clippy::all)]

#[macro_use]
extern crate error_chain;
extern crate napi_derive;

mod serde;

use core::panic;
use crossbeam_channel::{select, Sender};
use neon::prelude::*;
use ordhook::chainhook_sdk::observer::DataHandlerEvent;
use ordhook::chainhook_sdk::utils::Context as OrdhookContext;
use ordhook::config::Config;
use ordhook::service::Service;
use std::thread;

struct OrdinalsIndexerConfig {
  pub bitcoin_rpc_url: String,
  pub bitcoin_rpc_username: String,
  pub bitcoin_rpc_password: String,
  pub working_directory: String,
  pub logs_enabled: bool,
}

impl OrdinalsIndexerConfig {
  pub fn default() -> OrdinalsIndexerConfig {
    OrdinalsIndexerConfig {
      bitcoin_rpc_url: "http://0.0.0.0:8332".to_string(),
      bitcoin_rpc_username: "devnet".to_string(),
      bitcoin_rpc_password: "devnet".to_string(),
      working_directory: "/tmp/ordinals".to_string(),
      logs_enabled: true,
    }
  }
}

struct OrdinalsIndexer {
  command_tx: Sender<IndexerCommand>,
  custom_indexer_command_tx: Sender<CustomIndexerCommand>,
}

#[allow(dead_code)]
enum IndexerCommand {
  StreamBlocks,
  SyncBlocks,
  DropBlocks(Vec<u64>),
  RewriteBlocks(Vec<u64>),
  ReplayBlocks(Vec<u64>),
  Terminate,
}

#[allow(dead_code)]
enum CustomIndexerCommand {
  UpdateApplyCallback(Root<JsFunction>),
  UpdateUndoCallback(Root<JsFunction>),
  Terminate,
}

impl Finalize for OrdinalsIndexer {}

impl OrdinalsIndexer {
  fn new<'a, C>(cx: &mut C, ordhook_config: Config) -> Self
  where
    C: Context<'a>,
  {
    let (command_tx, command_rx) = crossbeam_channel::unbounded();
    let (custom_indexer_command_tx, custom_indexer_command_rx) = crossbeam_channel::unbounded();

    let logger = hiro_system_kit::log::setup_logger();
    let _guard = hiro_system_kit::log::setup_global_logger(logger.clone());
    let ctx = OrdhookContext {
      logger: Some(logger),
      tracer: false,
    };

    // Initialize service
    // {
    //   let _ = initialize_ordhook_db(&ordhook_config.expected_cache_path(), &ctx);
    //   let _ = open_readwrite_ordhook_db_conn_rocks_db(&ordhook_config.expected_cache_path(), &ctx);
    // }
    let mut service: Service = Service::new(ordhook_config, ctx);

    // Set up the observer sidecar - used for augmenting the bitcoin blocks with
    // ordinals information
    let observer_sidecar = service
      .set_up_observer_sidecar_runloop()
      .expect("unable to setup indexer");
    // Prepare internal predicate
    let (observer_config, payload_rx) = service
      .set_up_observer_config(vec![], true)
      .expect("unable to setup indexer");

    // Indexing thread
    let channel = cx.channel();
    thread::spawn(move || {
      let payload_rx = payload_rx.unwrap();

      channel.send(move |mut cx| {
        let mut apply_callback: Option<Root<JsFunction>> = None;
        let mut undo_callback: Option<Root<JsFunction>> = None;

        loop {
          select! {
            recv(payload_rx) -> msg => {
              match msg {
                Ok(DataHandlerEvent::Process(payload)) => {
                  if let Some(ref callback) = undo_callback {
                    for to_rollback in payload.rollback.into_iter() {
                      let callback = callback.clone(&mut cx).into_inner(&mut cx);
                      let this = cx.undefined();
                      let payload = serde::to_value(&mut cx, &to_rollback).expect("Unable to serialize block");
                      let args: Vec<Handle<JsValue>> = vec![payload];
                      callback.call(&mut cx, this, args)?;
                    }
                  }

                  if let Some(ref callback) = apply_callback {
                    for to_apply in payload.apply.into_iter() {
                      let callback = callback.clone(&mut cx).into_inner(&mut cx);
                      let this = cx.undefined();
                      let payload = serde::to_value(&mut cx, &to_apply).expect("Unable to serialize block");
                      let args: Vec<Handle<JsValue>> = vec![payload];
                      callback.call(&mut cx, this, args)?;
                    }
                  }
                }
                Ok(DataHandlerEvent::Terminate) => {
                  return Ok(());
                }
                _ => {
                }
              }
            }
            recv(custom_indexer_command_rx) -> msg => {
              match msg {
                Ok(CustomIndexerCommand::UpdateApplyCallback(callback)) => {
                  apply_callback = Some(callback);
                }
                Ok(CustomIndexerCommand::UpdateUndoCallback(callback)) => {
                  undo_callback = Some(callback);
                }
                Ok(CustomIndexerCommand::Terminate) => {
                  return Ok(())
                }
                _ => {}
              }
            }
          }
        }
      });
    });

    // Processing thread
    thread::spawn(move || {
      loop {
        let cmd = match command_rx.recv() {
          Ok(cmd) => cmd,
          Err(e) => {
            panic!("Runloop error: {}", e.to_string());
          }
        };

        match cmd {
          IndexerCommand::StreamBlocks => {
            // We start the service as soon as the start() method is called.
            let future = service.catch_up_with_chain_tip(false, &observer_config);
            let _ = hiro_system_kit::nestable_block_on(future)
              .expect("unable to start indexer");
            let future = service.start_event_observer(observer_sidecar);
            let (command_tx, event_rx) = hiro_system_kit::nestable_block_on(future)
              .expect("unable to start indexer");
            // Blocking call
            let _ = service.start_main_runloop(&command_tx, event_rx, None);
            break;
          }
          IndexerCommand::ReplayBlocks(blocks) => {
            println!("Will replay blocks {:?}", blocks);
          }
          IndexerCommand::DropBlocks(blocks) => {
            println!("Will drop blocks {:?}", blocks);
          }
          IndexerCommand::RewriteBlocks(blocks) => {
            println!("Will rewrite blocks {:?}", blocks);
          }
          IndexerCommand::SyncBlocks => {
            println!("Will sync blocks");
          }
          IndexerCommand::Terminate => {
            std::process::exit(0);
          }
        }
      }
    });

    Self {
      command_tx,
      custom_indexer_command_tx,
      // termination_rx,
    }
  }

  fn stream_blocks(&self) -> Result<bool, String> {
    let _ = self.command_tx.send(IndexerCommand::StreamBlocks);
    Ok(true)
  }

  fn terminate(&self) -> Result<bool, String> {
    let _ = self.command_tx.send(IndexerCommand::Terminate);
    Ok(true)
  }

  fn replay_blocks(&self, blocks: Vec<u64>) -> Result<bool, String> {
    let _ = self.command_tx.send(IndexerCommand::ReplayBlocks(blocks));
    Ok(true)
  }

  fn drop_blocks(&self, blocks: Vec<u64>) -> Result<bool, String> {
    let _ = self.command_tx.send(IndexerCommand::DropBlocks(blocks));
    Ok(true)
  }

  fn rewrite_blocks(&self, blocks: Vec<u64>) -> Result<bool, String> {
    let _ = self.command_tx.send(IndexerCommand::RewriteBlocks(blocks));
    Ok(true)
  }

  fn sync_blocks(&self) -> Result<bool, String> {
    let _ = self.command_tx.send(IndexerCommand::SyncBlocks);
    Ok(true)
  }

  fn update_apply_callback(&self, callback: Root<JsFunction>) -> Result<bool, String> {
    let _ = self
      .custom_indexer_command_tx
      .send(CustomIndexerCommand::UpdateApplyCallback(callback));
    Ok(true)
  }

  fn update_undo_callback(&self, callback: Root<JsFunction>) -> Result<bool, String> {
    let _ = self
      .custom_indexer_command_tx
      .send(CustomIndexerCommand::UpdateUndoCallback(callback));
    Ok(true)
  }
}

impl OrdinalsIndexer {
  fn js_new(mut cx: FunctionContext) -> JsResult<JsBox<OrdinalsIndexer>> {
    let settings = cx.argument::<JsObject>(0)?;

    let mut config = OrdinalsIndexerConfig::default();

    if let Ok(res) = settings
      .get(&mut cx, "bitcoinRpcUrl")?
      .downcast::<JsString, _>(&mut cx)
    {
      config.bitcoin_rpc_url = res.value(&mut cx);
    }
    if let Ok(res) = settings
      .get(&mut cx, "bitcoinRpcUsername")?
      .downcast::<JsString, _>(&mut cx)
    {
      config.bitcoin_rpc_username = res.value(&mut cx);
    }

    if let Ok(res) = settings
      .get(&mut cx, "bitcoinRpcPassword")?
      .downcast::<JsString, _>(&mut cx)
    {
      config.bitcoin_rpc_password = res.value(&mut cx);
    }

    if let Ok(res) = settings
      .get(&mut cx, "workingDirectory")?
      .downcast::<JsString, _>(&mut cx)
    {
      config.working_directory = res.value(&mut cx);
    }

    if let Ok(res) = settings
      .get(&mut cx, "logs")?
      .downcast::<JsBoolean, _>(&mut cx)
    {
      config.logs_enabled = res.value(&mut cx);
    }

    let mut ordhook_config = Config::mainnet_default();
    ordhook_config.network.bitcoind_rpc_username = config.bitcoin_rpc_username.clone();
    ordhook_config.network.bitcoind_rpc_password = config.bitcoin_rpc_password.clone();
    ordhook_config.network.bitcoind_rpc_url = config.bitcoin_rpc_url.clone();
    ordhook_config.storage.working_dir = config.working_directory.clone();
    ordhook_config.logs.chainhook_internals = config.logs_enabled;
    ordhook_config.logs.ordinals_internals = config.logs_enabled;

    let devnet: OrdinalsIndexer = OrdinalsIndexer::new(&mut cx, ordhook_config);
    Ok(cx.boxed(devnet))
  }

  fn js_stream_blocks(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .stream_blocks()
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_replay_blocks(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let blocks = {
      let seq = cx
        .argument::<JsArray>(0)?
        .root(&mut cx)
        .into_inner(&mut cx)
        .to_vec(&mut cx)?;
      let mut blocks = vec![];
      for item in seq.iter() {
        let block = item.downcast::<JsNumber, _>(&mut cx).unwrap();
        blocks.push(block.value(&mut cx) as u64);
      }
      blocks
    };

    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .replay_blocks(blocks)
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_drop_blocks(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let blocks = {
      let seq = cx
        .argument::<JsArray>(0)?
        .root(&mut cx)
        .into_inner(&mut cx)
        .to_vec(&mut cx)?;
      let mut blocks = vec![];
      for item in seq.iter() {
        let block = item.downcast::<JsNumber, _>(&mut cx).unwrap();
        blocks.push(block.value(&mut cx) as u64);
      }
      blocks
    };

    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .drop_blocks(blocks)
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_sync_blocks(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .sync_blocks()
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_rewrite_blocks(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let blocks = {
      let seq = cx
        .argument::<JsArray>(0)?
        .root(&mut cx)
        .into_inner(&mut cx)
        .to_vec(&mut cx)?;
      let mut blocks = vec![];
      for item in seq.iter() {
        let block = item.downcast::<JsNumber, _>(&mut cx).unwrap();
        blocks.push(block.value(&mut cx) as u64);
      }
      blocks
    };

    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .rewrite_blocks(blocks)
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_terminate(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .terminate()
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_on_block_apply(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let callback = cx.argument::<JsFunction>(0)?.root(&mut cx);

    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .update_apply_callback(callback)
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }

  fn js_on_block_undo(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let callback = cx.argument::<JsFunction>(0)?.root(&mut cx);

    cx.this()
      .downcast_or_throw::<JsBox<OrdinalsIndexer>, _>(&mut cx)?
      .update_undo_callback(callback)
      .or_else(|err| cx.throw_error(err.to_string()))?;

    Ok(cx.undefined())
  }
}

#[neon::main]
fn main(mut cx: ModuleContext) -> NeonResult<()> {
  cx.export_function("ordinalsIndexerNew", OrdinalsIndexer::js_new)?;
  cx.export_function(
    "ordinalsIndexerStreamBlocks",
    OrdinalsIndexer::js_stream_blocks,
  )?;
  cx.export_function(
    "ordinalsIndexerReplayBlocks",
    OrdinalsIndexer::js_replay_blocks,
  )?;
  cx.export_function("ordinalsIndexerDropBlocks", OrdinalsIndexer::js_drop_blocks)?;
  cx.export_function("ordinalsIndexerSyncBlocks", OrdinalsIndexer::js_sync_blocks)?;
  cx.export_function(
    "ordinalsIndexerRewriteBlocks",
    OrdinalsIndexer::js_rewrite_blocks,
  )?;
  cx.export_function("ordinalsIndexerTerminate", OrdinalsIndexer::js_terminate)?;
  cx.export_function(
    "ordinalsIndexerOnBlockApply",
    OrdinalsIndexer::js_on_block_apply,
  )?;
  cx.export_function(
    "ordinalsIndexerOnBlockUndo",
    OrdinalsIndexer::js_on_block_undo,
  )?;
  Ok(())
}
mod ordinals_indexer;
360 components/ordhook-sdk-js/src/ordinals_indexer.rs Normal file
@@ -0,0 +1,360 @@
use core::panic;
use crossbeam_channel::Sender;
use napi::bindgen_prelude::*;
use napi::threadsafe_function::{
  ErrorStrategy, ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode,
};
use ordhook::chainhook_sdk::chainhooks::bitcoin::BitcoinTransactionPayload;
use ordhook::chainhook_sdk::chainhooks::types::{
  BitcoinChainhookFullSpecification, BitcoinChainhookNetworkSpecification, BitcoinPredicateType,
  HookAction, OrdinalOperations,
};
use ordhook::chainhook_sdk::observer::DataHandlerEvent;
use ordhook::chainhook_sdk::utils::{BlockHeights, Context as OrdhookContext};
use ordhook::config::Config;
use ordhook::scan::bitcoin::scan_bitcoin_chainstate_via_rpc_using_predicate;
use ordhook::service::Service;
use std::collections::BTreeMap;
use std::thread;

enum IndexerCommand {
  StreamBlocks,
  ReplayBlocks(Vec<u64>),
  SyncBlocks,
  DropBlocks(Vec<u64>),
  RewriteBlocks(Vec<u64>),
  Terminate,
}

type BlockJsHandler = ThreadsafeFunction<BitcoinTransactionPayload, ErrorStrategy::Fatal>;

#[allow(dead_code)]
enum CustomIndexerCommand {
  UpdateApplyCallback(BlockJsHandler),
  UpdateUndoCallback(BlockJsHandler),
  Terminate,
}

struct OrdinalsIndexingRunloop {
  pub command_tx: Sender<IndexerCommand>,
  pub custom_indexer_command_tx: Sender<CustomIndexerCommand>,
}

impl OrdinalsIndexingRunloop {
  pub fn new(ordhook_config: Config) -> Self {
    let (command_tx, command_rx) = crossbeam_channel::unbounded();
    let (custom_indexer_command_tx, custom_indexer_command_rx) = crossbeam_channel::unbounded();

    let logger = hiro_system_kit::log::setup_logger();
    let _guard = hiro_system_kit::log::setup_global_logger(logger.clone());
    let ctx = OrdhookContext {
      logger: Some(logger),
      tracer: false,
    };

    // Initialize service
    // {
    //   let _ = initialize_ordhook_db(&ordhook_config.expected_cache_path(), &ctx);
    //   let _ = open_readwrite_ordhook_db_conn_rocks_db(&ordhook_config.expected_cache_path(), &ctx);
    // }
    let mut service: Service = Service::new(ordhook_config, ctx);

    // Set up the observer sidecar - used for augmenting the bitcoin blocks with
    // ordinals information
    let observer_sidecar = service
      .set_up_observer_sidecar_runloop()
      .expect("unable to setup indexer");
    // Prepare internal predicate
    let (observer_config, payload_rx) = service
      .set_up_observer_config(vec![], true)
      .expect("unable to setup indexer");

    // Indexing thread
    thread::spawn(move || {
      let payload_rx = payload_rx.unwrap();

      let mut apply_callback: Option<BlockJsHandler> = None;

      let mut undo_callback: Option<BlockJsHandler> = None;

      loop {
        let mut sel = crossbeam_channel::Select::new();
        let payload_rx_sel = sel.recv(&payload_rx);
        let custom_indexer_command_rx_sel = sel.recv(&custom_indexer_command_rx);

        // Block until one of the two receivers is ready, then dispatch on its index.
        let oper = sel.select();
        match oper.index() {
          i if i == payload_rx_sel => match oper.recv(&payload_rx) {
            Ok(DataHandlerEvent::Process(payload)) => {
              if let Some(callback) = undo_callback.clone() {
                for to_rollback in payload.rollback.into_iter() {
                  loop {
                    let (tx, rx) = crossbeam_channel::bounded(1);
                    callback.call_with_return_value::<bool, _>(to_rollback.clone(), ThreadsafeFunctionCallMode::Blocking, move |p| {
                      let _ = tx.send(p);
                      Ok(())
                    });
                    match rx.recv() {
                      Ok(true) => break,
                      Ok(false) => continue,
                      _ => panic!(),
                    }
                  }
                }
              }

              if let Some(ref callback) = apply_callback.clone() {
                for to_apply in payload.apply.into_iter() {
                  loop {
                    let (tx, rx) = crossbeam_channel::bounded(1);
                    callback.call_with_return_value::<bool, _>(to_apply.clone(), ThreadsafeFunctionCallMode::Blocking, move |p| {
                      let _ = tx.send(p);
                      Ok(())
                    });
                    match rx.recv() {
                      Ok(true) => break,
                      Ok(false) => continue,
                      _ => panic!(),
                    }
                  }
                }
              }
            }
            Ok(DataHandlerEvent::Terminate) => {
              break;
            }
            Err(e) => {
              println!("Error {}", e.to_string());
            }
          },
          i if i == custom_indexer_command_rx_sel => match oper.recv(&custom_indexer_command_rx) {
            Ok(CustomIndexerCommand::UpdateApplyCallback(callback)) => {
              apply_callback = Some(callback);
            }
            Ok(CustomIndexerCommand::UpdateUndoCallback(callback)) => {
              undo_callback = Some(callback);
            }
            Ok(CustomIndexerCommand::Terminate) => break,
            _ => {}
          },
          _ => unreachable!(),
        };
      }
    });

    // Processing thread
    thread::spawn(move || {
      loop {
        let cmd = match command_rx.recv() {
          Ok(cmd) => cmd,
          Err(e) => {
            panic!("Runloop error: {}", e.to_string());
          }
        };

        match cmd {
          IndexerCommand::StreamBlocks => {
            // We start the service as soon as the start() method is called.
            let future = service.catch_up_with_chain_tip(false, &observer_config);
            let _ = hiro_system_kit::nestable_block_on(future).expect("unable to start indexer");
            let future = service.start_event_observer(observer_sidecar);
            let (command_tx, event_rx) =
              hiro_system_kit::nestable_block_on(future).expect("unable to start indexer");
            // Blocking call
            let _ = service.start_main_runloop(&command_tx, event_rx, None);
            break;
          }
          IndexerCommand::ReplayBlocks(blocks) => {
            let network = &service.config.network.bitcoin_network;
            let mut networks = BTreeMap::new();
            // Build an InscriptionFeed predicate covering the requested blocks
            networks.insert(
              network.clone(),
              BitcoinChainhookNetworkSpecification {
                start_block: None,
                end_block: None,
                blocks: Some(blocks),
                expire_after_occurrence: None,
                include_proof: None,
                include_inputs: None,
                include_outputs: None,
                include_witness: None,
                predicate: BitcoinPredicateType::OrdinalsProtocol(
                  OrdinalOperations::InscriptionFeed,
                ),
                action: HookAction::Noop,
              },
            );
            let predicate_spec = BitcoinChainhookFullSpecification {
              uuid: "replay".to_string(),
              owner_uuid: None,
              name: "replay".to_string(),
              version: 1,
              networks,
            }
            .into_selected_network_specification(&network)
            .unwrap();

            let future = scan_bitcoin_chainstate_via_rpc_using_predicate(
              &predicate_spec,
              &service.config,
              Some(&observer_config),
              &service.ctx,
            );
            let _ = hiro_system_kit::nestable_block_on(future).expect("unable to start indexer");

            if let Some(tx) = observer_config.data_handler_tx {
              let _ = tx.send(DataHandlerEvent::Terminate);
            }
            break;
          }
          IndexerCommand::DropBlocks(blocks) => {
            println!("Will drop blocks {:?}", blocks);
          }
          IndexerCommand::RewriteBlocks(blocks) => {
            println!("Will rewrite blocks {:?}", blocks);
          }
          IndexerCommand::SyncBlocks => {
            println!("Will sync blocks");
          }
          IndexerCommand::Terminate => {
            if let Some(tx) = observer_config.data_handler_tx {
              let _ = tx.send(DataHandlerEvent::Terminate);
            }
            std::process::exit(0);
          }
        }
      }
    });

    Self {
      command_tx,
      custom_indexer_command_tx,
    }
  }
}
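The per-block handshake above is worth calling out: each payload is pushed to JS through a `ThreadsafeFunction`, and the Rust thread then parks on a one-shot rendezvous channel until the callback's boolean return comes back, retrying while it returns `false`. A stripped-down sketch of the same pattern, with a plain closure standing in for the JS callback (names are illustrative):

    use crossbeam_channel::bounded;

    // Sketch: deliver `payload` to an asynchronous consumer and block until it
    // acknowledges with `true`; a `false` acknowledgement replays the delivery.
    fn deliver_until_acked<F>(payload: &str, call_consumer: F)
    where
      F: Fn(&str, crossbeam_channel::Sender<bool>),
    {
      loop {
        // Capacity 1: the channel is a one-shot rendezvous for this delivery.
        let (ack_tx, ack_rx) = bounded::<bool>(1);
        call_consumer(payload, ack_tx); // non-blocking hand-off, like call_with_return_value
        match ack_rx.recv() {
          Ok(true) => break,     // consumer ingested the block; move on
          Ok(false) => continue, // consumer asked for a retry
          Err(_) => panic!("consumer disappeared"),
        }
      }
    }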
#[napi(object)]
pub struct OrdinalsIndexerConfig {
  pub bitcoin_rpc_url: Option<String>,
  pub bitcoin_rpc_username: Option<String>,
  pub bitcoin_rpc_password: Option<String>,
  pub working_dir: Option<String>,
  pub logs_enabled: Option<bool>,
}

impl OrdinalsIndexerConfig {
  pub fn default() -> OrdinalsIndexerConfig {
    OrdinalsIndexerConfig {
      bitcoin_rpc_url: Some("http://0.0.0.0:8332".to_string()),
      bitcoin_rpc_username: Some("devnet".to_string()),
      bitcoin_rpc_password: Some("devnet".to_string()),
      working_dir: Some("/tmp/ordinals".to_string()),
      logs_enabled: Some(true),
    }
  }
}

#[napi(js_name = "OrdinalsIndexer")]
pub struct OrdinalsIndexer {
  runloop: OrdinalsIndexingRunloop,
}

#[napi]
impl OrdinalsIndexer {
  #[napi(constructor)]
  pub fn new(config_overrides: Option<OrdinalsIndexerConfig>) -> Self {
    let mut config = Config::mainnet_default();

    if let Some(config_overrides) = config_overrides {
      if let Some(bitcoin_rpc_url) = config_overrides.bitcoin_rpc_url {
        config.network.bitcoind_rpc_url = bitcoin_rpc_url.clone();
      }
      if let Some(bitcoin_rpc_username) = config_overrides.bitcoin_rpc_username {
        config.network.bitcoind_rpc_username = bitcoin_rpc_username.clone();
      }
      if let Some(bitcoin_rpc_password) = config_overrides.bitcoin_rpc_password {
        config.network.bitcoind_rpc_password = bitcoin_rpc_password.clone();
      }
      if let Some(working_dir) = config_overrides.working_dir {
        config.storage.working_dir = working_dir.clone();
      }
      if let Some(logs_enabled) = config_overrides.logs_enabled {
        config.logs.chainhook_internals = logs_enabled.clone();
      }
      if let Some(logs_enabled) = config_overrides.logs_enabled {
        config.logs.ordinals_internals = logs_enabled;
      }
    }

    let runloop = OrdinalsIndexingRunloop::new(config);

    OrdinalsIndexer { runloop }
  }

  #[napi(
    js_name = "onBlock",
    ts_args_type = "callback: (block: any) => boolean"
  )]
  pub fn update_apply_block_callback(&self, apply_block_cb: JsFunction) {
    let tsfn: ThreadsafeFunction<BitcoinTransactionPayload, ErrorStrategy::Fatal> = apply_block_cb
      .create_threadsafe_function(0, |ctx| ctx.env.to_js_value(&ctx.value).map(|v| vec![v]))
      .unwrap();
    let _ = self
      .runloop
      .custom_indexer_command_tx
      .send(CustomIndexerCommand::UpdateApplyCallback(tsfn));
  }

  #[napi(
    js_name = "onBlockRollBack",
    ts_args_type = "callback: (block: any) => boolean"
  )]
  pub fn update_undo_block_callback(&self, undo_block_cb: JsFunction) {
    let tsfn: ThreadsafeFunction<BitcoinTransactionPayload, ErrorStrategy::Fatal> = undo_block_cb
      .create_threadsafe_function(
        0,
        |ctx: ThreadSafeCallContext<BitcoinTransactionPayload>| {
          ctx.env.to_js_value(&ctx.value).map(|v| vec![v])
        },
      )
      .unwrap();
    let _ = self
      .runloop
      .custom_indexer_command_tx
      .send(CustomIndexerCommand::UpdateUndoCallback(tsfn));
  }

  #[napi]
  pub fn stream_blocks(&self) {
    let _ = self.runloop.command_tx.send(IndexerCommand::StreamBlocks);
  }

  #[napi]
  pub fn replay_blocks(&self, blocks: Vec<i64>) {
    let blocks = blocks
      .into_iter()
      .map(|block| block as u64)
      .collect::<Vec<u64>>();
    let _ = self
      .runloop
      .command_tx
      .send(IndexerCommand::ReplayBlocks(blocks));
  }

  #[napi]
  pub fn replay_block_range(&self, start_block: i64, end_block: i64) {
    let range = BlockHeights::BlockRange(start_block as u64, end_block as u64);
    let blocks = range.get_sorted_entries().into_iter().collect();
    let _ = self
      .runloop
      .command_tx
      .send(IndexerCommand::ReplayBlocks(blocks));
  }

  #[napi]
  pub fn terminate(&self) {
    let _ = self.runloop.command_tx.send(IndexerCommand::Terminate);
  }
}
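Note how `replayBlockRange` expands the inclusive range into explicit heights via `BlockHeights::BlockRange` before reusing the same `ReplayBlocks` command, so both JS entry points converge on one code path. A sketch of the expansion, assuming `get_sorted_entries` yields the heights in ascending order:

    use ordhook::chainhook_sdk::utils::BlockHeights;

    // Sketch: both replayBlocks and replayBlockRange funnel into
    // IndexerCommand::ReplayBlocks with a plain list of heights.
    let range = BlockHeights::BlockRange(767430, 767431);
    let blocks: Vec<u64> = range.get_sorted_entries().into_iter().collect();
    assert_eq!(blocks, vec![767430, 767431]); // inclusive on both ends (assumed)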
@@ -1,84 +0,0 @@
//! Defines error handling types used by the crate;
//! uses the `error-chain` crate for generation

use neon;
use serde::ser;
use std::convert::From;
use std::fmt::Display;

error_chain! {
  errors {
    /// nodejs has a hard coded limit on string length
    /// trying to serialize a string that is too long will result in an error
    StringTooLong(len: usize) {
      description("String too long for nodejs")
      display("String too long for nodejs len: {}", len)
    }
    /// when deserializing to a boolean `false` `undefined` `null` `number`
    /// are valid inputs
    /// any other types will result in error
    UnableToCoerce(to_type: &'static str) {
      description("Unable to coerce")
      display("Unable to coerce value to type: {}", to_type)
    }
    /// occurs when deserializing a char from an empty string
    EmptyString {
      description("EmptyString")
      display("EmptyString")
    }
    /// occurs when deserializing a char from a string with
    /// more than one character
    StringTooLongForChar(len: usize) {
      description("String too long to be a char")
      display("String too long to be a char expected len: 1 got len: {}", len)
    }
    /// occurs when a deserializer expects a `null` or `undefined`
    /// property and found another type
    ExpectingNull {
      description("ExpectingNull")
      display("ExpectingNull")
    }
    /// occurs when deserializing to an enum and the source object has
    /// a non-1 number of properties
    InvalidKeyType(key: String) {
      description("InvalidKeyType")
      display("key: '{}'", key)
    }
    /// an internal deserialization error from an invalid array
    ArrayIndexOutOfBounds(index: u32, length: u32) {
      description("ArrayIndexOutOfBounds")
      display(
        "ArrayIndexOutOfBounds: attempt to access ({}) size: ({})",
        index,
        length
      )
    }
    #[doc(hidden)]
    /// This type of object is not supported
    NotImplemented(name: &'static str) {
      description("Not Implemented")
      display("Not Implemented: '{}'", name)
    }
    /// A JS exception was thrown
    Js(throw: neon::result::Throw) {
      description("JS exception")
      display("JS exception")
    }
    /// failed to convert something to f64
    CastError {
      description("CastError")
      display("CastError")
    }
  }
}

impl ser::Error for Error {
  fn custom<T: Display>(msg: T) -> Self {
    ErrorKind::Msg(msg.to_string()).into()
  }
}

impl From<neon::result::Throw> for Error {
  fn from(throw: neon::result::Throw) -> Self {
    ErrorKind::Js(throw).into()
  }
}
@@ -1,4 +0,0 @@
mod errors;
mod ser;

pub use ser::to_value;
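The hand-written neon serializer deleted below becomes unnecessary because napi's `serde-json` feature (enabled in the Cargo.toml above) can turn any `serde::Serialize` value into a JS value; this is exactly what the new `ctx.env.to_js_value(&ctx.value)` calls in `ordinals_indexer.rs` rely on. A minimal sketch of that conversion, with a hypothetical struct:

    use napi::{Env, JsUnknown, Result};
    use serde::Serialize;

    #[derive(Serialize)]
    struct BlockSummary {
      height: u64,
      inscriptions_revealed: Vec<String>,
    }

    // Sketch: with the "serde-json" feature, napi serializes through serde
    // directly; no per-type Serializer implementation is needed.
    fn block_to_js(env: Env, block: &BlockSummary) -> Result<JsUnknown> {
      env.to_js_value(block)
    }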
@@ -1,575 +0,0 @@
|
||||
//!
|
||||
//! Serialize a Rust data structure into a `JsValue`
|
||||
//!
|
||||
|
||||
use super::errors::Error;
|
||||
use super::errors::ErrorKind;
|
||||
use super::errors::Result as LibResult;
|
||||
use neon::prelude::*;
|
||||
use num;
|
||||
use serde::ser::{self, Serialize};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
fn as_num<T: num::cast::NumCast, OutT: num::cast::NumCast>(n: T) -> LibResult<OutT> {
|
||||
match num::cast::<T, OutT>(n) {
|
||||
Some(n2) => Ok(n2),
|
||||
None => bail!(ErrorKind::CastError),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a value of type `V` to a `JsValue`
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// * `NumberCastError` trying to serialize a `u64` can fail if it overflows in a cast to `f64`
|
||||
/// * `StringTooLong` if the string exceeds v8's max string size
|
||||
///
|
||||
#[inline]
|
||||
pub fn to_value<'j, C, V>(cx: &mut C, value: &V) -> LibResult<Handle<'j, JsValue>>
|
||||
where
|
||||
C: Context<'j>,
|
||||
V: Serialize + ?Sized,
|
||||
{
|
||||
let serializer = Serializer {
|
||||
cx,
|
||||
ph: PhantomData,
|
||||
};
|
||||
let serialized_value = value.serialize(serializer)?;
|
||||
Ok(serialized_value)
|
||||
}
|
||||

#[doc(hidden)]
pub struct Serializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    ph: PhantomData<&'j ()>,
}

#[doc(hidden)]
pub struct ArraySerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    array: Handle<'j, JsArray>,
}

#[doc(hidden)]
pub struct TupleVariantSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    outer_object: Handle<'j, JsObject>,
    inner: ArraySerializer<'a, 'j, C>,
}

#[doc(hidden)]
pub struct MapSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    object: Handle<'j, JsObject>,
    key_holder: Handle<'j, JsObject>,
}

#[doc(hidden)]
pub struct StructSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    cx: &'a mut C,
    object: Handle<'j, JsObject>,
}

#[doc(hidden)]
pub struct StructVariantSerializer<'a, 'j, C: 'a>
where
    C: Context<'j>,
{
    outer_object: Handle<'j, JsObject>,
    inner: StructSerializer<'a, 'j, C>,
}

#[doc(hidden)]
impl<'a, 'j, C> ser::Serializer for Serializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    type SerializeSeq = ArraySerializer<'a, 'j, C>;
    type SerializeTuple = ArraySerializer<'a, 'j, C>;
    type SerializeTupleStruct = ArraySerializer<'a, 'j, C>;
    type SerializeTupleVariant = TupleVariantSerializer<'a, 'j, C>;
    type SerializeMap = MapSerializer<'a, 'j, C>;
    type SerializeStruct = StructSerializer<'a, 'j, C>;
    type SerializeStructVariant = StructVariantSerializer<'a, 'j, C>;

    #[inline]
    fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
        Ok(JsBoolean::new(self.cx, v).upcast())
    }

    #[inline]
    fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_i128(self, v: i128) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_u128(self, v: u128) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, as_num::<_, f64>(v)?).upcast())
    }

    #[inline]
    fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
        Ok(JsNumber::new(self.cx, v).upcast())
    }

    fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
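        // A char encodes to at most 4 UTF-8 bytes; encode into a stack
        // buffer and hand the resulting &str to the JS engine as a string.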
        let mut b = [0; 4];
        let result = v.encode_utf8(&mut b);
        let js_str =
            JsString::try_new(self.cx, result).map_err(|_| ErrorKind::StringTooLongForChar(4))?;
        Ok(js_str.upcast())
    }

    #[inline]
    fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
        let len = v.len();
        let js_str = JsString::try_new(self.cx, v).map_err(|_| ErrorKind::StringTooLong(len))?;
        Ok(js_str.upcast())
    }

    #[inline]
    fn serialize_bytes(self, v: &[u8]) -> Result<Self::Ok, Self::Error> {
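        // Allocate a JsBuffer of the exact byte length, then copy the
        // slice's contents in while the buffer is mutably borrowed.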
        let mut buff = JsBuffer::new(self.cx, as_num::<_, u32>(v.len())?)?;
        self.cx
            .borrow_mut(&mut buff, |buff| buff.as_mut_slice().clone_from_slice(v));
        Ok(buff.upcast())
    }

    #[inline]
    fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
        Ok(JsNull::new(self.cx).upcast())
    }

    #[inline]
    fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        value.serialize(self)
    }

    #[inline]
    fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
        Ok(JsNull::new(self.cx).upcast())
    }

    #[inline]
    fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
        Ok(JsNull::new(self.cx).upcast())
    }

    #[inline]
    fn serialize_unit_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
    ) -> Result<Self::Ok, Self::Error> {
        self.serialize_str(variant)
    }

    #[inline]
    fn serialize_newtype_struct<T: ?Sized>(
        self,
        _name: &'static str,
        value: &T,
    ) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        value.serialize(self)
    }

    #[inline]
    fn serialize_newtype_variant<T: ?Sized>(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        value: &T,
    ) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        let obj = JsObject::new(&mut *self.cx);
        let value_js = to_value(self.cx, value)?;
        obj.set(self.cx, variant, value_js)?;

        Ok(obj.upcast())
    }

    #[inline]
    fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        Ok(ArraySerializer::new(self.cx))
    }

    #[inline]
    fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        Ok(ArraySerializer::new(self.cx))
    }

    #[inline]
    fn serialize_tuple_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        Ok(ArraySerializer::new(self.cx))
    }

    #[inline]
    fn serialize_tuple_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        TupleVariantSerializer::new(self.cx, variant)
    }

    #[inline]
    fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        Ok(MapSerializer::new(self.cx))
    }

    #[inline]
    fn serialize_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        Ok(StructSerializer::new(self.cx))
    }

    #[inline]
    fn serialize_struct_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        StructVariantSerializer::new(self.cx, variant)
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    #[inline]
    fn new(cx: &'a mut C) -> Self {
        let array = JsArray::new(cx, 0);
        ArraySerializer { cx, array }
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeSeq for ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
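        // Emulate Array.prototype.push: write each element at the array's
        // current length, which is also its next free index.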
        let value = to_value(self.cx, value)?;

        let arr: Handle<'j, JsArray> = self.array;
        let len = arr.len(self.cx);
        arr.set(self.cx, len, value)?;
        Ok(())
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.array.upcast())
    }
}

impl<'a, 'j, C> ser::SerializeTuple for ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    #[inline]
    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        ser::SerializeSeq::end(self)
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeTupleStruct for ArraySerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    #[inline]
    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        ser::SerializeSeq::serialize_element(self, value)
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        ser::SerializeSeq::end(self)
    }
}

#[doc(hidden)]
impl<'a, 'j, C> TupleVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    fn new(cx: &'a mut C, key: &'static str) -> LibResult<Self> {
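        // serde's externally tagged enum form: `E::V(a, b)` serializes as
        // `{ "V": [a, b] }`, so the variant name keys a one-entry wrapper
        // object whose value is the array of fields.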
        let inner_array = JsArray::new(cx, 0);
        let outer_object = JsObject::new(cx);
        outer_object.set(cx, key, inner_array)?;
        Ok(TupleVariantSerializer {
            outer_object,
            inner: ArraySerializer {
                cx,
                array: inner_array,
            },
        })
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeTupleVariant for TupleVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    #[inline]
    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        use serde::ser::SerializeSeq;
        self.inner.serialize_element(value)
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.outer_object.upcast())
    }
}

#[doc(hidden)]
impl<'a, 'j, C> MapSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    fn new(cx: &'a mut C) -> Self {
        let object = JsObject::new(cx);
        let key_holder = JsObject::new(cx);
        MapSerializer {
            cx,
            object,
            key_holder,
        }
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeMap for MapSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

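    // serde drives map entries as paired serialize_key / serialize_value
    // calls. The key is converted first and parked under the "key" property
    // of the scratch `key_holder` object; the matching serialize_value call
    // reads it back and writes the pair onto the result `object`.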
    fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let key = to_value(self.cx, key)?;
        self.key_holder.set(self.cx, "key", key)?;
        Ok(())
    }

    fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let key: Handle<'j, JsValue> = self.key_holder.get(&mut *self.cx, "key")?;
        let value_obj = to_value(self.cx, value)?;
        self.object.set(self.cx, key, value_obj)?;
        Ok(())
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.object.upcast())
    }
}

#[doc(hidden)]
impl<'a, 'j, C> StructSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    #[inline]
    fn new(cx: &'a mut C) -> Self {
        let object = JsObject::new(cx);
        StructSerializer { cx, object }
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeStruct for StructSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    #[inline]
    fn serialize_field<T: ?Sized>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let value = to_value(self.cx, value)?;
        self.object.set(self.cx, key, value)?;
        Ok(())
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.object.upcast())
    }
}

#[doc(hidden)]
impl<'a, 'j, C> StructVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    fn new(cx: &'a mut C, key: &'static str) -> LibResult<Self> {
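        // Externally tagged form again: `E::V { f }` serializes as
        // `{ "V": { "f": ... } }`, a wrapper object around an inner object.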
        let inner_object = JsObject::new(cx);
        let outer_object = JsObject::new(cx);
        outer_object.set(cx, key, inner_object)?;
        Ok(StructVariantSerializer {
            outer_object,
            inner: StructSerializer {
                cx,
                object: inner_object,
            },
        })
    }
}

#[doc(hidden)]
impl<'a, 'j, C> ser::SerializeStructVariant for StructVariantSerializer<'a, 'j, C>
where
    C: Context<'j>,
{
    type Ok = Handle<'j, JsValue>;
    type Error = Error;

    #[inline]
    fn serialize_field<T: ?Sized>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        use serde::ser::SerializeStruct;
        self.inner.serialize_field(key, value)
    }

    #[inline]
    fn end(self) -> Result<Self::Ok, Self::Error> {
        Ok(self.outer_object.upcast())
    }
}
@@ -1,100 +0,0 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig.json to read more about this file */

    /* Projects */
    // "incremental": true, /* Enable incremental compilation */
    // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
    // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */
    // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */
    // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
    // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */

    /* Language and Environment */
    "target": "es5", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
    // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
    // "jsx": "preserve", /* Specify what JSX code is generated. */
    // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
    // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
    // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
    // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
    // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */
    // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
    // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
    // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */

    /* Modules */
    "module": "commonjs", /* Specify what module code is generated. */
    // "rootDir": "./", /* Specify the root folder within your source files. */
    // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
    // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
    // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
    // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
    // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */
    // "types": [], /* Specify type package names to be included without being referenced in a source file. */
    // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
    // "resolveJsonModule": true, /* Enable importing .json files */
    // "noResolve": true, /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */

    /* JavaScript Support */
    // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */
    // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
    // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */

    /* Emit */
    "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
    "declarationMap": true, /* Create sourcemaps for d.ts files. */
    // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
    "sourceMap": true, /* Create source map files for emitted JavaScript files. */
    // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
    "outDir": "./dist", /* Specify an output folder for all emitted files. */
    // "removeComments": true, /* Disable emitting comments. */
    // "noEmit": true, /* Disable emitting files from a compilation. */
    // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
    // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */
    // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
    // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
    // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
    // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
    // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
    // "newLine": "crlf", /* Set the newline character for emitting files. */
    // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
    // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */
    // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
    // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */
    // "declarationDir": "./", /* Specify the output directory for generated declaration files. */

    /* Interop Constraints */
    // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
    // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
    "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */
    // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
    "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */

    /* Type Checking */
    "strict": true, /* Enable all strict type-checking options. */
    // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type. */
    // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */
    // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
    // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */
    // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
    // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */
    // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */
    // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
    // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
    // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */
    // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
    // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
    // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
    // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
    // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
    // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */
    // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
    // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */

    /* Completeness */
    // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
    "skipLibCheck": true, /* Skip type checking all .d.ts files. */
  }
}
2082
components/ordhook-sdk-js/yarn.lock
Normal file
File diff suppressed because it is too large
@@ -2,17 +2,41 @@ FROM rust:bullseye as build

WORKDIR /src

RUN apt update && apt install -y ca-certificates pkg-config libssl-dev libclang-11-dev
RUN apt-get update && apt-get install -y ca-certificates pkg-config libssl-dev libclang-11-dev curl gnupg

RUN rustup update 1.72.0 && rustup default 1.72.0

COPY ./components/ordhook-cli /src/components/ordhook-cli
RUN mkdir /out

ENV NODE_MAJOR=18

RUN mkdir -p /etc/apt/keyrings

RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg

RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list

RUN apt-get update

RUN apt-get install nodejs -y

RUN npm install -g @napi-rs/cli yarn

COPY ./components/ordhook-core /src/components/ordhook-core

WORKDIR /src/components/ordhook-cli
COPY ./components/ordhook-sdk-js /src/components/ordhook-sdk-js

RUN mkdir /out
COPY ./components/ordhook-cli /src/components/ordhook-cli

WORKDIR /src/components/ordhook-sdk-js

RUN yarn install

RUN yarn build

RUN cp *.node /out

WORKDIR /src/components/ordhook-cli

RUN cargo build --features release --release

@@ -20,8 +44,14 @@ RUN cp target/release/ordhook /out

FROM debian:bullseye-slim

WORKDIR /ordhook-sdk-js

RUN apt update && apt install -y ca-certificates libssl-dev

COPY --from=build /out/*.node /ordhook-sdk-js/

COPY --from=build /out/ordhook /bin/ordhook

WORKDIR /workspace

11
ordhook.code-workspace
Normal file
@@ -0,0 +1,11 @@
{
    "folders": [
        {
            "path": "."
        },
        {
            "path": "../ordinals-api"
        }
    ],
    "settings": {}
}
6
package-lock.json
generated
Normal file
@@ -0,0 +1,6 @@
{
    "name": "ordhook",
    "lockfileVersion": 3,
    "requires": true,
    "packages": {}
}