Mirror of https://github.com/alexgo-io/bitcoin-indexer.git, synced 2026-01-12 16:52:57 +08:00
Merge pull request #240 from hirosystems/develop
BREAKING CHANGE: implement Jubilee support
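For orientation, here is a minimal sketch of the dual inscription numbering this change introduces: each reveal now carries a classic number (negative for cursed inscriptions) and a jubilee number that, from the per-network activation heights visible in the `SequenceCursor::pick_next` hunk further down, keeps numbering cursed inscriptions in the positive sequence. The type and function names and the sample values below are simplified stand-ins for illustration, not the crate's actual API.

```rust
// Simplified stand-ins; the real types live in chainhook-sdk / ordhook.
#[derive(Clone, Copy)]
enum Network {
    Bitcoin,
    Testnet,
    Signet,
    Regtest,
}

#[derive(Debug)]
struct InscriptionNumber {
    classic: i64, // pre-Jubilee numbering: cursed inscriptions stay negative
    jubilee: i64, // post-Jubilee numbering: cursed inscriptions join the positive sequence
}

// Activation heights as they appear in the diff below.
fn jubilee_height(network: Network) -> u64 {
    match network {
        Network::Bitcoin => 824_544,
        Network::Regtest => 110,
        Network::Signet => 175_392,
        Network::Testnet => 2_544_192,
    }
}

// Pick the number pair for a new inscription: before activation the jubilee
// number simply mirrors the classic one; after activation it follows its own
// always-increasing counter.
fn pick_numbers(
    network: Network,
    block_height: u64,
    next_classic: i64,
    next_jubilee: i64,
) -> InscriptionNumber {
    let jubilee = if block_height >= jubilee_height(network) {
        next_jubilee
    } else {
        next_classic
    };
    InscriptionNumber { classic: next_classic, jubilee }
}

fn main() {
    // Hypothetical values: a cursed inscription revealed after mainnet activation.
    let n = pick_numbers(Network::Bitcoin, 830_000, -472_043, 52_000_000);
    println!("{:?}", n); // classic stays negative, jubilee is positive
}
```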
.github/workflows/ci.yaml (vendored, 56 lines changed)
@@ -6,7 +6,7 @@ on:
- develop
- feat/ordhook-sdk-js
paths-ignore:
- '**/CHANGELOG.md'
- "**/CHANGELOG.md"
pull_request:
workflow_dispatch:

@@ -14,6 +14,9 @@ concurrency:
group: ${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}
cancel-in-progress: true

env:
DOCKER_IMAGE: hirosystems/${{ github.event.repository.name }}

jobs:
test:
runs-on: ubuntu-latest
@@ -41,7 +44,7 @@ jobs:
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

build-publish:
runs-on: ubuntu-latest
@@ -84,7 +87,7 @@ jobs:
uses: docker/metadata-action@v5
with:
images: |
hirosystems/${{ github.event.repository.name }}
${{ env.DOCKER_IMAGE }}
tags: |
type=ref,event=branch
type=ref,event=pr
@@ -115,13 +118,14 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
k8s-env: [mainnet]
network: [mainnet]
subenv: [blue]
needs: build-publish
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
env:
DEPLOY_ENV: dev
environment:
name: Development-${{ matrix.k8s-env }}
name: Development-${{ matrix.network }}-${{ matrix.subenv }}
url: https://platform.dev.hiro.so/
steps:
- name: Checkout actions repo
@@ -131,11 +135,13 @@ jobs:
token: ${{ secrets.GH_TOKEN }}
repository: ${{ secrets.DEVOPS_ACTIONS_REPO }}

- name: Deploy Ordhook build to Dev ${{ matrix.k8s-env }}
- name: Deploy Ordhook build to Dev ${{ matrix.network }} ${{ matrix.subenv }}
uses: ./actions/deploy
with:
docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }}
file_pattern: manifests/bitcoin/${{ matrix.k8s-env }}/ordhook/${{ env.DEPLOY_ENV }}/base/kustomization.yaml
docker_image: ${{ env.DOCKER_IMAGE }}
docker_image_tag_or_digest: ${{ needs.build-publish.outputs.docker_image_digest }}
file_pattern: manifests/bitcoin/${{ matrix.network }}/ordhook/${{ env.DEPLOY_ENV }}/*/kustomization.yaml
subenv: ${{ matrix.subenv }}
gh_token: ${{ secrets.GH_TOKEN }}

auto-approve-dev:
@@ -146,16 +152,17 @@ jobs:
- name: Approve pending deployments
run: |
sleep 5
ENV_IDS=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '[.[].environment.id // empty]')
ENV_ID=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '.[0].environment.id // empty')
if [[ "${ENV_IDS}" != "[]" ]]; then
curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":${ENV_IDS},\"state\":\"approved\",\"comment\":\"auto approve\"}"
curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":[${ENV_ID}],\"state\":\"approved\",\"comment\":\"auto approve\"}"
fi

deploy-staging:
runs-on: ubuntu-latest
strategy:
matrix:
k8s-env: [mainnet]
network: [mainnet]
subenv: [blue]
needs:
- build-publish
- deploy-dev
@@ -163,7 +170,7 @@ jobs:
env:
DEPLOY_ENV: stg
environment:
name: Staging-${{ matrix.k8s-env }}
name: Staging-${{ matrix.network }}-${{ matrix.subenv }}
url: https://platform.stg.hiro.so/
steps:
- name: Checkout actions repo
@@ -173,11 +180,13 @@ jobs:
token: ${{ secrets.GH_TOKEN }}
repository: ${{ secrets.DEVOPS_ACTIONS_REPO }}

- name: Deploy Chainhook build to Stg ${{ matrix.k8s-env }}
- name: Deploy Ordhook build to Stg ${{ matrix.network }} ${{ matrix.subenv }}
uses: ./actions/deploy
with:
docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }}
file_pattern: manifests/bitcoin/${{ matrix.k8s-env }}/ordhook/${{ env.DEPLOY_ENV }}/base/kustomization.yaml
docker_image: ${{ env.DOCKER_IMAGE }}
docker_image_tag_or_digest: ${{ needs.build-publish.outputs.docker_image_digest }}
file_pattern: manifests/bitcoin/${{ matrix.network }}/ordhook/${{ env.DEPLOY_ENV }}/*/kustomization.yaml
subenv: ${{ matrix.subenv }}
gh_token: ${{ secrets.GH_TOKEN }}

auto-approve-stg:
@@ -190,16 +199,17 @@ jobs:
- name: Approve pending deployments
run: |
sleep 5
ENV_IDS=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '[.[].environment.id // empty]')
ENV_ID=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '.[0].environment.id // empty')
if [[ "${ENV_IDS}" != "[]" ]]; then
curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":${ENV_IDS},\"state\":\"approved\",\"comment\":\"auto approve\"}"
curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/ordhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":[${ENV_ID}],\"state\":\"approved\",\"comment\":\"auto approve\"}"
fi

deploy-prod:
runs-on: ubuntu-latest
strategy:
matrix:
k8s-env: [mainnet,testnet]
network: [mainnet]
subenv: [blue, green]
needs:
- build-publish
- deploy-staging
@@ -207,7 +217,7 @@ jobs:
env:
DEPLOY_ENV: prd
environment:
name: Production-${{ matrix.k8s-env }}
name: Production-${{ matrix.network }}-${{ matrix.subenv }}
url: https://platform.hiro.so/
steps:
- name: Checkout actions repo
@@ -217,9 +227,11 @@ jobs:
token: ${{ secrets.GH_TOKEN }}
repository: ${{ secrets.DEVOPS_ACTIONS_REPO }}

- name: Deploy Ordhook build to Prd ${{ matrix.k8s-env }}
- name: Deploy Ordhook build to Prd ${{ matrix.network }} ${{ matrix.subenv }}
uses: ./actions/deploy
with:
docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }}
file_pattern: manifests/bitcoin/${{ matrix.k8s-env }}/ordhook/${{ env.DEPLOY_ENV }}/base/kustomization.yaml
docker_image: ${{ env.DOCKER_IMAGE }}
docker_image_tag_or_digest: ${{ needs.build-publish.outputs.docker_image_digest }}
file_pattern: manifests/bitcoin/${{ matrix.network }}/ordhook/${{ env.DEPLOY_ENV }}/*/kustomization.yaml
subenv: ${{ matrix.subenv }}
gh_token: ${{ secrets.GH_TOKEN }}

Cargo.lock (generated, 578 lines changed)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
[package]
name = "ordhook-cli"
version = "1.0.2"
version = "2.0.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -14,7 +14,11 @@ num_cpus = "1.16.0"
serde = "1"
serde_json = "1"
serde_derive = "1"
reqwest = { version = "0.11", default-features = false, features = ["stream", "json", "rustls-tls"] }
reqwest = { version = "0.11", default-features = false, features = [
"stream",
"json",
"rustls-tls",
] }
hiro-system-kit = "0.3.1"
clap = { version = "3.2.23", features = ["derive"], optional = true }
clap_generate = { version = "3.0.3", optional = true }

@@ -23,13 +23,14 @@ use ordhook::core::protocol::inscription_parsing::parse_inscriptions_and_standar
use ordhook::core::protocol::satoshi_numbering::compute_satoshi_number;
use ordhook::db::{
delete_data_in_ordhook_db, find_all_inscription_transfers, find_all_inscriptions_in_block,
find_all_transfers_in_block, find_inscription_with_id, find_last_block_inserted,
find_latest_inscription_block_height, find_lazy_block_at_block_height,
find_all_transfers_in_block, find_block_bytes_at_block_height, find_inscription_with_id,
find_last_block_inserted, find_latest_inscription_block_height, find_missing_blocks,
get_default_ordhook_db_file_path, initialize_ordhook_db, open_ordhook_db_conn_rocks_db_loop,
open_readonly_ordhook_db_conn, open_readonly_ordhook_db_conn_rocks_db,
open_readwrite_ordhook_db_conn,
open_readwrite_ordhook_db_conn, BlockBytesCursor,
};
use ordhook::download::download_ordinals_dataset_if_required;
use ordhook::hex;
use ordhook::scan::bitcoin::scan_bitcoin_chainstate_via_rpc_using_predicate;
use ordhook::service::{start_observer_forwarding, Service};
use reqwest::Client as HttpClient;
@@ -338,6 +339,9 @@ struct StartCommand {
/// HTTP Auth token
#[clap(long = "auth-token")]
pub auth_token: Option<String>,
/// Check blocks integrity
#[clap(long = "check-blocks-integrity")]
pub block_integrity_check: bool,
}

#[derive(Subcommand, PartialEq, Clone, Debug)]
@@ -577,7 +581,7 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
let mut total_transfers_in_block = 0;

for (_, inscription) in inscriptions.iter() {
println!("Inscription {} revealed at block #{} (inscription_number {}, ordinal_number {})", inscription.get_inscription_id(), block_height, inscription.inscription_number, inscription.ordinal_number);
println!("Inscription {} revealed at block #{} (inscription_number {}, ordinal_number {})", inscription.get_inscription_id(), block_height, inscription.inscription_number.jubilee, inscription.ordinal_number);
if let Some(transfers) = locations.remove(&inscription.get_inscription_id())
{
for t in transfers.iter().skip(1) {
@@ -639,7 +643,7 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
"Inscription {} revealed at block #{} (inscription_number {}, ordinal_number {})",
inscription.get_inscription_id(),
block_height,
inscription.inscription_number,
inscription.inscription_number.jubilee,
inscription.ordinal_number
);
let transfers = find_all_inscription_transfers(
@@ -668,15 +672,19 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
.await?;
let transaction_identifier = TransactionIdentifier::new(&cmd.transaction_id);
let cache = new_traversals_lazy_cache(100);
let res = compute_satoshi_number(
let (res, mut back_trace) = compute_satoshi_number(
&config.get_ordhook_config().db_path,
&block.block_identifier,
&transaction_identifier,
0,
0,
&Arc::new(cache),
true,
ctx,
)?;
back_trace.reverse();
for (block_height, tx) in back_trace.iter() {
println!("{}\t{}", block_height, hex::encode(tx));
}
println!("{:?}", res);
}
Command::Service(subcmd) => match subcmd {
@@ -735,7 +743,9 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
}

let mut service = Service::new(config, ctx.clone());
return service.run(predicates, None).await;
return service
.run(predicates, None, cmd.block_integrity_check)
.await;
}
},
Command::Config(subcmd) => match subcmd {
@@ -793,9 +803,10 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
ctx,
);
for i in cmd.get_blocks().into_iter() {
let block =
find_lazy_block_at_block_height(i as u32, 10, false, &blocks_db, ctx)
let block_bytes =
find_block_bytes_at_block_height(i as u32, 10, &blocks_db, ctx)
.expect("unable to retrieve block {i}");
let block = BlockBytesCursor::new(&block_bytes);
info!(ctx.expect_logger(), "--------------------");
info!(ctx.expect_logger(), "Block: {i}");
for tx in block.iter_tx() {
@@ -861,18 +872,9 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
{
let blocks_db =
open_readonly_ordhook_db_conn_rocks_db(&config.expected_cache_path(), ctx)?;
let tip = find_last_block_inserted(&blocks_db) as u64;
let tip = find_last_block_inserted(&blocks_db);
println!("Tip: {}", tip);

let mut missing_blocks = vec![];
for i in cmd.start_block..=cmd.end_block {
if find_lazy_block_at_block_height(i as u32, 0, false, &blocks_db, ctx)
.is_none()
{
println!("Missing block #{i}");
missing_blocks.push(i);
}
}
let missing_blocks = find_missing_blocks(&blocks_db, 1, tip, ctx);
println!("{:?}", missing_blocks);
}
}

@@ -50,6 +50,7 @@ impl ConfigFile {
"devnet" => (StacksNetwork::Devnet, BitcoinNetwork::Regtest),
"testnet" => (StacksNetwork::Testnet, BitcoinNetwork::Testnet),
"mainnet" => (StacksNetwork::Mainnet, BitcoinNetwork::Mainnet),
"signet" => (StacksNetwork::Testnet, BitcoinNetwork::Signet),
_ => return Err("network.mode not supported".to_string()),
};

@@ -1,6 +1,6 @@
[package]
name = "ordhook"
version = "0.5.0"
version = "1.0.0"
edition = "2021"

[dependencies]
@@ -10,15 +10,19 @@ serde_json = "1"
serde_derive = "1"
hex = "0.4.3"
rand = "0.8.5"
chainhook-sdk = { version = "0.11.0", features = ["zeromq"] }
# chainhook-sdk = { version = "=0.10.1", path = "../../../chainhook/components/chainhook-sdk", default-features = false, features = ["zeromq", "log"] }
chainhook-sdk = { version = "=0.12.0", features = ["zeromq"] }
# chainhook-sdk = { version = "=0.12.0", path = "../../../chainhook/components/chainhook-sdk", features = ["zeromq"] }
hiro-system-kit = "0.3.1"
reqwest = { version = "0.11", default-features = false, features = ["stream", "json", "rustls-tls"] }
tokio = { version = "=1.24", features = ["full"] }
reqwest = { version = "0.11", default-features = false, features = [
"stream",
"json",
"rustls-tls",
] }
tokio = { version = "1.35.1", features = ["full"] }
futures-util = "0.3.24"
flate2 = "1.0.24"
tar = "0.4.38"
flume = "0.10.14"
flume = "0.11.0"
ansi_term = "0.12.1"
atty = "0.2.14"
crossbeam-channel = "0.5.8"
@@ -33,8 +37,12 @@ anyhow = { version = "1.0.56", features = ["backtrace"] }
schemars = { version = "0.8.10", git = "https://github.com/hirosystems/schemars.git", branch = "feat-chainhook-fixes" }
progressing = '3'
futures = "0.3.28"
rocksdb = { version = "0.21.0", default-features = false, features = ["snappy"] }
rocksdb = { version = "0.21.0", default-features = false, features = [
"snappy",
] }
pprof = { version = "0.13.0", features = ["flamegraph"], optional = true }
hyper = { version = "=0.14.27" }
lazy_static = { version = "1.4.0" }

# [profile.release]
# debug = true

@@ -14,7 +14,7 @@ use chainhook_sdk::{
|
||||
|
||||
use crate::{
|
||||
config::{Config, LogConfig},
|
||||
db::{find_lazy_block_at_block_height, open_ordhook_db_conn_rocks_db_loop},
|
||||
db::{find_pinned_block_bytes_at_block_height, open_ordhook_db_conn_rocks_db_loop},
|
||||
};
|
||||
|
||||
use crate::db::{
|
||||
@@ -22,7 +22,7 @@ use crate::db::{
|
||||
open_readonly_ordhook_db_conn,
|
||||
};
|
||||
|
||||
use crate::db::LazyBlockTransaction;
|
||||
use crate::db::TransactionBytesCursor;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct OrdhookConfig {
|
||||
@@ -44,11 +44,11 @@ pub fn new_traversals_cache(
|
||||
|
||||
pub fn new_traversals_lazy_cache(
|
||||
cache_size: usize,
|
||||
) -> DashMap<(u32, [u8; 8]), LazyBlockTransaction, BuildHasherDefault<FxHasher>> {
|
||||
) -> DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>> {
|
||||
let hasher = FxBuildHasher::default();
|
||||
DashMap::with_capacity_and_hasher(
|
||||
((cache_size.saturating_sub(500)) * 1000 * 1000)
|
||||
.div(LazyBlockTransaction::get_average_bytes_size()),
|
||||
.div(TransactionBytesCursor::get_average_bytes_size()),
|
||||
hasher,
|
||||
)
|
||||
}
|
||||
@@ -139,7 +139,7 @@ pub fn should_sync_ordhook_db(
|
||||
|
||||
match find_latest_inscription_block_height(&inscriptions_db_conn, ctx)? {
|
||||
Some(height) => {
|
||||
if find_lazy_block_at_block_height(height as u32, 3, false, &blocks_db, &ctx).is_none()
|
||||
if find_pinned_block_bytes_at_block_height(height as u32, 3, &blocks_db, &ctx).is_none()
|
||||
{
|
||||
start_block = start_block.min(height);
|
||||
} else {
|
||||
|
||||
@@ -10,7 +10,7 @@ use std::time::Duration;
|
||||
use tokio::task::JoinSet;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::db::LazyBlock;
|
||||
use crate::db::BlockBytesCursor;
|
||||
|
||||
use chainhook_sdk::indexer::bitcoin::{
|
||||
build_http_client, parse_downloaded_block, try_download_block_bytes_with_retry,
|
||||
@@ -19,7 +19,7 @@ use chainhook_sdk::indexer::bitcoin::{
|
||||
use super::protocol::inscription_parsing::parse_inscriptions_and_standardize_block;
|
||||
|
||||
pub enum PostProcessorCommand {
|
||||
ProcessBlocks(Vec<(u64, LazyBlock)>, Vec<BitcoinBlockData>),
|
||||
ProcessBlocks(Vec<(u64, Vec<u8>)>, Vec<BitcoinBlockData>),
|
||||
Terminate,
|
||||
}
|
||||
|
||||
@@ -111,7 +111,7 @@ pub async fn download_and_pipeline_blocks(
|
||||
while let Ok(Some(block_bytes)) = rx.recv() {
|
||||
let raw_block_data =
|
||||
parse_downloaded_block(block_bytes).expect("unable to parse block");
|
||||
let compressed_block = LazyBlock::from_full_block(&raw_block_data)
|
||||
let compressed_block = BlockBytesCursor::from_full_block(&raw_block_data)
|
||||
.expect("unable to compress block");
|
||||
let block_height = raw_block_data.height as u64;
|
||||
let block_data = if block_height >= start_sequencing_blocks_at_height {
|
||||
@@ -177,7 +177,7 @@ pub async fn download_and_pipeline_blocks(
|
||||
}
|
||||
}
|
||||
None => {
|
||||
stop_runloop = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -195,9 +195,9 @@ pub async fn download_and_pipeline_blocks(
|
||||
let mut ooo_compacted_blocks = vec![];
|
||||
for (block_height, block_opt, compacted_block) in new_blocks.into_iter() {
|
||||
if let Some(block) = block_opt {
|
||||
inbox.insert(block_height, (block, compacted_block));
|
||||
inbox.insert(block_height, (block, compacted_block.to_vec()));
|
||||
} else {
|
||||
ooo_compacted_blocks.push((block_height, compacted_block));
|
||||
ooo_compacted_blocks.push((block_height, compacted_block.to_vec()));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ use std::{
|
||||
use crate::{
|
||||
config::Config,
|
||||
core::pipeline::{PostProcessorCommand, PostProcessorController, PostProcessorEvent},
|
||||
db::{insert_entry_in_blocks, open_ordhook_db_conn_rocks_db_loop, LazyBlock},
|
||||
db::{insert_entry_in_blocks, open_ordhook_db_conn_rocks_db_loop},
|
||||
};
|
||||
|
||||
pub fn start_block_archiving_processor(
|
||||
@@ -72,7 +72,7 @@ pub fn start_block_archiving_processor(
|
||||
}
|
||||
|
||||
pub fn store_compacted_blocks(
|
||||
mut compacted_blocks: Vec<(u64, LazyBlock)>,
|
||||
mut compacted_blocks: Vec<(u64, Vec<u8>)>,
|
||||
update_tip: bool,
|
||||
blocks_db_rw: &DB,
|
||||
ctx: &Context,
|
||||
|
||||
@@ -38,7 +38,7 @@ use crate::{
|
||||
},
|
||||
};
|
||||
|
||||
use crate::db::{LazyBlockTransaction, TraversalResult};
|
||||
use crate::db::{TransactionBytesCursor, TraversalResult};
|
||||
|
||||
use crate::{
|
||||
config::Config,
|
||||
@@ -68,8 +68,6 @@ pub fn start_inscription_indexing_processor(
|
||||
let mut inscriptions_db_conn_rw =
|
||||
open_readwrite_ordhook_db_conn(&config.expected_cache_path(), &ctx).unwrap();
|
||||
let ordhook_config = config.get_ordhook_config();
|
||||
let blocks_db_rw =
|
||||
open_ordhook_db_conn_rocks_db_loop(true, &config.expected_cache_path(), &ctx);
|
||||
let mut empty_cycles = 0;
|
||||
|
||||
let inscriptions_db_conn =
|
||||
@@ -105,11 +103,12 @@ pub fn start_inscription_indexing_processor(
|
||||
},
|
||||
};
|
||||
|
||||
// Early return
|
||||
if blocks.is_empty() {
|
||||
store_compacted_blocks(compacted_blocks, true, &blocks_db_rw, &ctx);
|
||||
continue;
|
||||
} else {
|
||||
{
|
||||
let blocks_db_rw = open_ordhook_db_conn_rocks_db_loop(
|
||||
true,
|
||||
&config.expected_cache_path(),
|
||||
&ctx,
|
||||
);
|
||||
store_compacted_blocks(
|
||||
compacted_blocks,
|
||||
true,
|
||||
@@ -118,8 +117,12 @@ pub fn start_inscription_indexing_processor(
|
||||
);
|
||||
}
|
||||
|
||||
ctx.try_log(|logger| info!(logger, "Processing {} blocks", blocks.len()));
|
||||
// Early return
|
||||
if blocks.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
ctx.try_log(|logger| info!(logger, "Processing {} blocks", blocks.len()));
|
||||
blocks = process_blocks(
|
||||
&mut blocks,
|
||||
&mut sequence_cursor,
|
||||
@@ -131,7 +134,6 @@ pub fn start_inscription_indexing_processor(
|
||||
);
|
||||
|
||||
garbage_collect_nth_block += blocks.len();
|
||||
|
||||
if garbage_collect_nth_block > garbage_collect_every_n_blocks {
|
||||
ctx.try_log(|logger| info!(logger, "Performing garbage collecting"));
|
||||
|
||||
@@ -162,7 +164,7 @@ pub fn start_inscription_indexing_processor(
|
||||
pub fn process_blocks(
|
||||
next_blocks: &mut Vec<BitcoinBlockData>,
|
||||
sequence_cursor: &mut SequenceCursor,
|
||||
cache_l2: &Arc<DashMap<(u32, [u8; 8]), LazyBlockTransaction, BuildHasherDefault<FxHasher>>>,
|
||||
cache_l2: &Arc<DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>>>,
|
||||
inscriptions_db_conn_rw: &mut Connection,
|
||||
ordhook_config: &OrdhookConfig,
|
||||
post_processor: &Option<Sender<BitcoinBlockData>>,
|
||||
@@ -199,7 +201,7 @@ pub fn process_blocks(
|
||||
|
||||
let inscriptions_revealed = get_inscriptions_revealed_in_block(&block)
|
||||
.iter()
|
||||
.map(|d| d.inscription_number.to_string())
|
||||
.map(|d| d.get_inscription_number().to_string())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
let inscriptions_transferred = get_inscriptions_transferred_in_block(&block).len();
|
||||
@@ -259,7 +261,7 @@ pub fn process_block(
|
||||
next_blocks: &Vec<BitcoinBlockData>,
|
||||
sequence_cursor: &mut SequenceCursor,
|
||||
cache_l1: &mut BTreeMap<(TransactionIdentifier, usize), TraversalResult>,
|
||||
cache_l2: &Arc<DashMap<(u32, [u8; 8]), LazyBlockTransaction, BuildHasherDefault<FxHasher>>>,
|
||||
cache_l2: &Arc<DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>>>,
|
||||
inscriptions_db_tx: &Transaction,
|
||||
ordhook_config: &OrdhookConfig,
|
||||
ctx: &Context,
|
||||
|
||||
@@ -1,317 +1,89 @@
|
||||
use std::collections::BTreeMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use chainhook_sdk::bitcoincore_rpc_json::bitcoin::hashes::hex::FromHex;
|
||||
use chainhook_sdk::bitcoincore_rpc_json::bitcoin::Txid;
|
||||
use chainhook_sdk::indexer::bitcoin::BitcoinTransactionFullBreakdown;
|
||||
use chainhook_sdk::indexer::bitcoin::{standardize_bitcoin_block, BitcoinBlockFullBreakdown};
|
||||
use chainhook_sdk::types::{
|
||||
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, OrdinalInscriptionCurseType,
|
||||
OrdinalInscriptionRevealData, OrdinalInscriptionTransferData, OrdinalOperation,
|
||||
OrdinalInscriptionNumber, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
|
||||
OrdinalOperation,
|
||||
};
|
||||
use chainhook_sdk::utils::Context;
|
||||
use chainhook_sdk::{
|
||||
bitcoincore_rpc::bitcoin::Transaction, indexer::bitcoin::BitcoinTransactionFullBreakdown,
|
||||
};
|
||||
use std::collections::BTreeMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::ord::envelope::{Envelope, ParsedEnvelope, RawEnvelope};
|
||||
use crate::ord::inscription::Inscription;
|
||||
use crate::ord::inscription_id::InscriptionId;
|
||||
use {
|
||||
chainhook_sdk::bitcoincore_rpc::bitcoin::{
|
||||
blockdata::{
|
||||
opcodes,
|
||||
script::{self, Instruction, Instructions},
|
||||
},
|
||||
util::taproot::TAPROOT_ANNEX_PREFIX,
|
||||
Script, Witness,
|
||||
},
|
||||
std::{iter::Peekable, str},
|
||||
};
|
||||
|
||||
const PROTOCOL_ID: &[u8] = b"ord";
|
||||
|
||||
const BODY_TAG: &[u8] = &[];
|
||||
const CONTENT_TYPE_TAG: &[u8] = &[1];
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct Inscription {
|
||||
pub body: Option<Vec<u8>>,
|
||||
pub content_type: Option<Vec<u8>>,
|
||||
pub curse: Option<OrdinalInscriptionCurseType>,
|
||||
}
|
||||
|
||||
impl Inscription {
|
||||
pub fn from_transaction(tx: &Transaction) -> Option<Inscription> {
|
||||
InscriptionParser::parse(&tx.input.get(0)?.witness).ok()
|
||||
}
|
||||
|
||||
pub(crate) fn body(&self) -> Option<&[u8]> {
|
||||
Some(self.body.as_ref()?)
|
||||
}
|
||||
|
||||
pub(crate) fn content_type(&self) -> Option<&str> {
|
||||
str::from_utf8(self.content_type.as_ref()?).ok()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum InscriptionError {
|
||||
EmptyWitness,
|
||||
InvalidInscription,
|
||||
KeyPathSpend,
|
||||
NoInscription,
|
||||
Script(script::Error),
|
||||
UnrecognizedEvenField,
|
||||
}
|
||||
|
||||
type Result<T, E = InscriptionError> = std::result::Result<T, E>;
|
||||
|
||||
pub struct InscriptionParser<'a> {
|
||||
pub instructions: Peekable<Instructions<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> InscriptionParser<'a> {
|
||||
pub fn parse(witness: &Witness) -> Result<Inscription> {
|
||||
if witness.is_empty() {
|
||||
return Err(InscriptionError::EmptyWitness);
|
||||
}
|
||||
|
||||
if witness.len() == 1 {
|
||||
return Err(InscriptionError::KeyPathSpend);
|
||||
}
|
||||
|
||||
let annex = witness
|
||||
.last()
|
||||
.and_then(|element| element.first().map(|byte| *byte == TAPROOT_ANNEX_PREFIX))
|
||||
.unwrap_or(false);
|
||||
|
||||
if witness.len() == 2 && annex {
|
||||
return Err(InscriptionError::KeyPathSpend);
|
||||
}
|
||||
|
||||
let script = witness
|
||||
.iter()
|
||||
.nth(if annex {
|
||||
witness.len() - 1
|
||||
} else {
|
||||
witness.len() - 2
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
InscriptionParser {
|
||||
instructions: Script::from(Vec::from(script)).instructions().peekable(),
|
||||
}
|
||||
.parse_script()
|
||||
}
|
||||
|
||||
pub fn parse_script(mut self) -> Result<Inscription> {
|
||||
loop {
|
||||
let next = self.advance()?;
|
||||
|
||||
if next == Instruction::PushBytes(&[]) {
|
||||
if let Some(inscription) = self.parse_inscription()? {
|
||||
return Ok(inscription);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn advance(&mut self) -> Result<Instruction<'a>> {
|
||||
self.instructions
|
||||
.next()
|
||||
.ok_or(InscriptionError::NoInscription)?
|
||||
.map_err(InscriptionError::Script)
|
||||
}
|
||||
|
||||
fn parse_inscription(&mut self) -> Result<Option<Inscription>> {
|
||||
if self.advance()? == Instruction::Op(opcodes::all::OP_IF) {
|
||||
if !self.accept(Instruction::PushBytes(PROTOCOL_ID))? {
|
||||
return Err(InscriptionError::NoInscription);
|
||||
}
|
||||
|
||||
let mut fields = BTreeMap::new();
|
||||
|
||||
loop {
|
||||
match self.advance()? {
|
||||
Instruction::PushBytes(BODY_TAG) => {
|
||||
let mut body = Vec::new();
|
||||
while !self.accept(Instruction::Op(opcodes::all::OP_ENDIF))? {
|
||||
body.extend_from_slice(self.expect_push()?);
|
||||
}
|
||||
fields.insert(BODY_TAG, body);
|
||||
break;
|
||||
}
|
||||
Instruction::PushBytes(tag) => {
|
||||
if fields.contains_key(tag) {
|
||||
return Err(InscriptionError::InvalidInscription);
|
||||
}
|
||||
fields.insert(tag, self.expect_push()?.to_vec());
|
||||
}
|
||||
Instruction::Op(opcodes::all::OP_ENDIF) => break,
|
||||
_ => return Err(InscriptionError::InvalidInscription),
|
||||
}
|
||||
}
|
||||
|
||||
let body = fields.remove(BODY_TAG);
|
||||
let content_type = fields.remove(CONTENT_TYPE_TAG);
|
||||
|
||||
for tag in fields.keys() {
|
||||
if let Some(lsb) = tag.first() {
|
||||
if lsb % 2 == 0 {
|
||||
return Ok(Some(Inscription {
|
||||
body,
|
||||
content_type,
|
||||
curse: Some(OrdinalInscriptionCurseType::Tag(*lsb)),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(Some(Inscription {
|
||||
body,
|
||||
content_type,
|
||||
curse: None,
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn expect_push(&mut self) -> Result<&'a [u8]> {
|
||||
match self.advance()? {
|
||||
Instruction::PushBytes(bytes) => Ok(bytes),
|
||||
_ => Err(InscriptionError::InvalidInscription),
|
||||
}
|
||||
}
|
||||
|
||||
fn accept(&mut self, instruction: Instruction) -> Result<bool> {
|
||||
match self.instructions.peek() {
|
||||
Some(Ok(next)) => {
|
||||
if *next == instruction {
|
||||
self.advance()?;
|
||||
Ok(true)
|
||||
} else {
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
Some(Err(err)) => Err(InscriptionError::Script(*err)),
|
||||
None => Ok(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Copy, Clone)]
|
||||
pub(crate) enum Media {
|
||||
Audio,
|
||||
Iframe,
|
||||
Image,
|
||||
Pdf,
|
||||
Text,
|
||||
Unknown,
|
||||
Video,
|
||||
}
|
||||
|
||||
impl Media {
|
||||
const TABLE: &'static [(&'static str, Media, &'static [&'static str])] = &[
|
||||
("application/json", Media::Text, &["json"]),
|
||||
("application/pdf", Media::Pdf, &["pdf"]),
|
||||
("application/pgp-signature", Media::Text, &["asc"]),
|
||||
("application/yaml", Media::Text, &["yaml", "yml"]),
|
||||
("audio/flac", Media::Audio, &["flac"]),
|
||||
("audio/mpeg", Media::Audio, &["mp3"]),
|
||||
("audio/wav", Media::Audio, &["wav"]),
|
||||
("image/apng", Media::Image, &["apng"]),
|
||||
("image/avif", Media::Image, &[]),
|
||||
("image/gif", Media::Image, &["gif"]),
|
||||
("image/jpeg", Media::Image, &["jpg", "jpeg"]),
|
||||
("image/png", Media::Image, &["png"]),
|
||||
("image/svg+xml", Media::Iframe, &["svg"]),
|
||||
("image/webp", Media::Image, &["webp"]),
|
||||
("model/stl", Media::Unknown, &["stl"]),
|
||||
("text/html;charset=utf-8", Media::Iframe, &["html"]),
|
||||
("text/plain;charset=utf-8", Media::Text, &["txt"]),
|
||||
("video/mp4", Media::Video, &["mp4"]),
|
||||
("video/webm", Media::Video, &["webm"]),
|
||||
];
|
||||
}
|
||||
|
||||
impl FromStr for Media {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
for entry in Self::TABLE {
|
||||
if entry.0 == s {
|
||||
return Ok(entry.1);
|
||||
}
|
||||
}
|
||||
|
||||
Err("unknown content type: {s}".to_string())
|
||||
}
|
||||
}
|
||||
use {chainhook_sdk::bitcoincore_rpc::bitcoin::Witness, std::str};
|
||||
|
||||
pub fn parse_inscriptions_from_witness(
|
||||
input_index: usize,
|
||||
witness_bytes: Vec<Vec<u8>>,
|
||||
txid: &str,
|
||||
) -> Option<OrdinalOperation> {
|
||||
let witness = Witness::from_vec(witness_bytes.clone());
|
||||
let mut inscription = match InscriptionParser::parse(&witness) {
|
||||
Ok(inscription) => inscription,
|
||||
Err(_e) => {
|
||||
let mut cursed_inscription = None;
|
||||
for bytes in witness_bytes.iter() {
|
||||
let script = Script::from(bytes.to_vec());
|
||||
let parser = InscriptionParser {
|
||||
instructions: script.instructions().peekable(),
|
||||
};
|
||||
) -> Option<Vec<OrdinalInscriptionRevealData>> {
|
||||
let witness = Witness::from_slice(&witness_bytes);
|
||||
let tapscript = witness.tapscript()?;
|
||||
let envelopes: Vec<Envelope<Inscription>> = RawEnvelope::from_tapscript(tapscript, input_index)
|
||||
.ok()?
|
||||
.into_iter()
|
||||
.map(|e| ParsedEnvelope::from(e))
|
||||
.collect();
|
||||
let mut inscriptions = vec![];
|
||||
for envelope in envelopes.into_iter() {
|
||||
let curse_type = if envelope.payload.unrecognized_even_field {
|
||||
Some(OrdinalInscriptionCurseType::UnrecognizedEvenField)
|
||||
} else if envelope.payload.duplicate_field {
|
||||
Some(OrdinalInscriptionCurseType::DuplicateField)
|
||||
} else if envelope.payload.incomplete_field {
|
||||
Some(OrdinalInscriptionCurseType::IncompleteField)
|
||||
} else if envelope.input != 0 {
|
||||
Some(OrdinalInscriptionCurseType::NotInFirstInput)
|
||||
} else if envelope.offset != 0 {
|
||||
Some(OrdinalInscriptionCurseType::NotAtOffsetZero)
|
||||
} else if envelope.payload.pointer.is_some() {
|
||||
Some(OrdinalInscriptionCurseType::Pointer)
|
||||
} else if envelope.pushnum {
|
||||
Some(OrdinalInscriptionCurseType::Pushnum)
|
||||
} else if envelope.stutter {
|
||||
Some(OrdinalInscriptionCurseType::Stutter)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut inscription = match parser.parse_script() {
|
||||
Ok(inscription) => inscription,
|
||||
Err(_) => continue,
|
||||
};
|
||||
inscription.curse = Some(OrdinalInscriptionCurseType::P2wsh);
|
||||
cursed_inscription = Some(inscription);
|
||||
break;
|
||||
}
|
||||
match cursed_inscription {
|
||||
Some(inscription) => inscription,
|
||||
None => return None,
|
||||
}
|
||||
}
|
||||
};
|
||||
let inscription_id = InscriptionId {
|
||||
txid: Txid::from_str(txid).unwrap(),
|
||||
index: input_index as u32,
|
||||
};
|
||||
|
||||
let inscription_id = InscriptionId {
|
||||
txid: Txid::from_hex(txid).unwrap(),
|
||||
index: input_index as u32,
|
||||
};
|
||||
let no_content_bytes = vec![];
|
||||
let inscription_content_bytes = envelope.payload.body().take().unwrap_or(&no_content_bytes);
|
||||
let mut content_bytes = "0x".to_string();
|
||||
content_bytes.push_str(&hex::encode(&inscription_content_bytes));
|
||||
|
||||
if input_index > 0 {
|
||||
inscription.curse = Some(OrdinalInscriptionCurseType::Batch);
|
||||
let reveal_data = OrdinalInscriptionRevealData {
|
||||
content_type: envelope
|
||||
.payload
|
||||
.content_type()
|
||||
.unwrap_or("unknown")
|
||||
.to_string(),
|
||||
content_bytes,
|
||||
content_length: inscription_content_bytes.len(),
|
||||
inscription_id: inscription_id.to_string(),
|
||||
inscription_input_index: input_index,
|
||||
tx_index: 0,
|
||||
inscription_output_value: 0,
|
||||
inscription_fee: 0,
|
||||
inscription_number: OrdinalInscriptionNumber::zero(),
|
||||
inscriber_address: None,
|
||||
ordinal_number: 0,
|
||||
ordinal_block_height: 0,
|
||||
ordinal_offset: 0,
|
||||
transfers_pre_inscription: 0,
|
||||
satpoint_post_inscription: format!(""),
|
||||
curse_type,
|
||||
};
|
||||
inscriptions.push(reveal_data);
|
||||
}
|
||||
|
||||
let no_content_bytes = vec![];
|
||||
let inscription_content_bytes = inscription.body().take().unwrap_or(&no_content_bytes);
|
||||
let mut content_bytes = "0x".to_string();
|
||||
content_bytes.push_str(&hex::encode(&inscription_content_bytes));
|
||||
|
||||
let payload = OrdinalInscriptionRevealData {
|
||||
content_type: inscription.content_type().unwrap_or("unknown").to_string(),
|
||||
content_bytes,
|
||||
content_length: inscription_content_bytes.len(),
|
||||
inscription_id: inscription_id.to_string(),
|
||||
inscription_input_index: input_index,
|
||||
tx_index: 0,
|
||||
inscription_output_value: 0,
|
||||
inscription_fee: 0,
|
||||
inscription_number: 0,
|
||||
inscriber_address: None,
|
||||
ordinal_number: 0,
|
||||
ordinal_block_height: 0,
|
||||
ordinal_offset: 0,
|
||||
transfers_pre_inscription: 0,
|
||||
satpoint_post_inscription: format!(""),
|
||||
curse_type: inscription.curse.take(),
|
||||
};
|
||||
|
||||
Some(OrdinalOperation::InscriptionRevealed(payload))
|
||||
Some(inscriptions)
|
||||
}
|
||||
|
||||
pub fn parse_inscriptions_from_standardized_tx(
|
||||
@@ -326,12 +98,14 @@ pub fn parse_inscriptions_from_standardized_tx(
|
||||
.map(|w| hex::decode(&w[2..]).unwrap())
|
||||
.collect();
|
||||
|
||||
if let Some(operation) = parse_inscriptions_from_witness(
|
||||
if let Some(inscriptions) = parse_inscriptions_from_witness(
|
||||
input_index,
|
||||
witness_bytes,
|
||||
tx.transaction_identifier.get_hash_bytes_str(),
|
||||
) {
|
||||
operations.push(operation);
|
||||
for inscription in inscriptions.into_iter() {
|
||||
operations.push(OrdinalOperation::InscriptionRevealed(inscription));
|
||||
}
|
||||
}
|
||||
}
|
||||
operations
|
||||
@@ -349,32 +123,34 @@ pub fn parse_inscriptions_in_raw_tx(
|
||||
.map(|w| hex::decode(w).unwrap())
|
||||
.collect();
|
||||
|
||||
if let Some(operation) =
|
||||
if let Some(inscriptions) =
|
||||
parse_inscriptions_from_witness(input_index, witness_bytes, &tx.txid)
|
||||
{
|
||||
operations.push(operation);
|
||||
for inscription in inscriptions.into_iter() {
|
||||
operations.push(OrdinalOperation::InscriptionRevealed(inscription));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
operations
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ordinal_inscription_parsing() {
|
||||
let bytes = hex::decode("208737bc46923c3e64c7e6768c0346879468bf3aba795a5f5f56efca288f50ed2aac0063036f7264010118746578742f706c61696e3b636861727365743d7574662d38004c9948656c6c6f2030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030300a68").unwrap();
|
||||
// #[test]
|
||||
// fn test_ordinal_inscription_parsing() {
|
||||
// let bytes = hex::decode("208737bc46923c3e64c7e6768c0346879468bf3aba795a5f5f56efca288f50ed2aac0063036f7264010118746578742f706c61696e3b636861727365743d7574662d38004c9948656c6c6f2030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030300a68").unwrap();
|
||||
|
||||
let script = Script::from(bytes);
|
||||
let parser = InscriptionParser {
|
||||
instructions: script.instructions().peekable(),
|
||||
};
|
||||
// let script = Script::from(bytes);
|
||||
// let parser = InscriptionParser {
|
||||
// instructions: script.instructions().peekable(),
|
||||
// };
|
||||
|
||||
let inscription = match parser.parse_script() {
|
||||
Ok(inscription) => inscription,
|
||||
Err(_) => panic!(),
|
||||
};
|
||||
// let inscription = match parser.parse_script() {
|
||||
// Ok(inscription) => inscription,
|
||||
// Err(_) => panic!(),
|
||||
// };
|
||||
|
||||
println!("{:?}", inscription);
|
||||
}
|
||||
// println!("{:?}", inscription);
|
||||
// }
|
||||
|
||||
pub fn parse_inscriptions_and_standardize_block(
|
||||
raw_block: BitcoinBlockFullBreakdown,
|
||||
|
||||
@@ -5,10 +5,11 @@ use std::{
|
||||
};
|
||||
|
||||
use chainhook_sdk::{
|
||||
bitcoincore_rpc_json::bitcoin::{hashes::hex::FromHex, Address, Network, Script},
|
||||
bitcoincore_rpc_json::bitcoin::{Address, Network, ScriptBuf},
|
||||
types::{
|
||||
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
|
||||
OrdinalInscriptionCurseType, OrdinalOperation, TransactionIdentifier,
|
||||
OrdinalInscriptionCurseType, OrdinalInscriptionNumber, OrdinalOperation,
|
||||
TransactionIdentifier,
|
||||
},
|
||||
utils::Context,
|
||||
};
|
||||
@@ -20,11 +21,10 @@ use rusqlite::{Connection, Transaction};
|
||||
use crate::{
|
||||
core::OrdhookConfig,
|
||||
db::{
|
||||
find_blessed_inscription_with_ordinal_number,
|
||||
find_latest_cursed_inscription_number_at_block_height,
|
||||
find_latest_inscription_number_at_block_height, format_satpoint_to_watch,
|
||||
update_inscriptions_with_block, update_sequence_metadata_with_block, LazyBlockTransaction,
|
||||
TraversalResult,
|
||||
find_blessed_inscription_with_ordinal_number, find_nth_classic_neg_number_at_block_height,
|
||||
find_nth_classic_pos_number_at_block_height, find_nth_jubilee_number_at_block_height,
|
||||
format_inscription_id, format_satpoint_to_watch, update_inscriptions_with_block,
|
||||
update_sequence_metadata_with_block, TransactionBytesCursor, TraversalResult,
|
||||
},
|
||||
ord::height::Height,
|
||||
};
|
||||
@@ -68,12 +68,18 @@ pub fn parallelize_inscription_data_computations(
|
||||
block: &BitcoinBlockData,
|
||||
next_blocks: &Vec<BitcoinBlockData>,
|
||||
cache_l1: &mut BTreeMap<(TransactionIdentifier, usize), TraversalResult>,
|
||||
cache_l2: &Arc<DashMap<(u32, [u8; 8]), LazyBlockTransaction, BuildHasherDefault<FxHasher>>>,
|
||||
cache_l2: &Arc<DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>>>,
|
||||
inscriptions_db_tx: &Transaction,
|
||||
ordhook_config: &OrdhookConfig,
|
||||
ctx: &Context,
|
||||
) -> Result<bool, String> {
|
||||
ctx.try_log(|logger| {
|
||||
let inner_ctx = if ordhook_config.logs.ordinals_internals {
|
||||
ctx.clone()
|
||||
} else {
|
||||
Context::empty()
|
||||
};
|
||||
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
"Inscriptions data computation for block #{} started", block.block_identifier.index
|
||||
@@ -83,12 +89,6 @@ pub fn parallelize_inscription_data_computations(
|
||||
let (mut transactions_ids, l1_cache_hits) =
|
||||
get_transactions_to_process(block, cache_l1, inscriptions_db_tx, ctx);
|
||||
|
||||
let inner_ctx = if ordhook_config.logs.ordinals_internals {
|
||||
ctx.clone()
|
||||
} else {
|
||||
Context::empty()
|
||||
};
|
||||
|
||||
let has_transactions_to_process = !transactions_ids.is_empty() || !l1_cache_hits.is_empty();
|
||||
|
||||
let thread_max = ordhook_config.ingestion_thread_max;
|
||||
@@ -118,13 +118,13 @@ pub fn parallelize_inscription_data_computations(
|
||||
while let Ok(Some((transaction_id, block_identifier, input_index, prioritary))) =
|
||||
rx.recv()
|
||||
{
|
||||
let traversal: Result<TraversalResult, String> = compute_satoshi_number(
|
||||
let traversal: Result<(TraversalResult, _), String> = compute_satoshi_number(
|
||||
&moved_ordhook_db_path,
|
||||
&block_identifier,
|
||||
&transaction_id,
|
||||
input_index,
|
||||
0,
|
||||
&local_cache,
|
||||
false,
|
||||
&moved_ctx,
|
||||
);
|
||||
let _ = moved_traversal_tx.send((traversal, prioritary, thread_index));
|
||||
@@ -134,11 +134,12 @@ pub fn parallelize_inscription_data_computations(
|
||||
thread_pool_handles.push(handle);
|
||||
}
|
||||
|
||||
// Consume L1 cache
|
||||
// Consume L1 cache: if the traversal was performed in a previous round
|
||||
// retrieve it and use it.
|
||||
let mut thread_index = 0;
|
||||
for key in l1_cache_hits.iter() {
|
||||
if let Some(entry) = cache_l1.remove(key) {
|
||||
let _ = traversal_tx.send((Ok(entry), true, thread_index));
|
||||
if let Some(entry) = cache_l1.get(key) {
|
||||
let _ = traversal_tx.send((Ok((entry.clone(), vec![])), true, thread_index));
|
||||
thread_index = (thread_index + 1) % thread_max;
|
||||
}
|
||||
}
|
||||
@@ -148,7 +149,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
.map(|b| format!("{}", b.block_identifier.index))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
ctx.try_log(|logger| {
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
"Number of inscriptions in block #{} to process: {} (L1 cache hits: {}, queue: [{}], L1 cache len: {}, L2 cache len: {})",
|
||||
@@ -190,7 +191,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
traversals_received += 1;
|
||||
}
|
||||
match traversal_result {
|
||||
Ok(traversal) => {
|
||||
Ok((traversal, _)) => {
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
@@ -231,7 +232,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
let (mut transactions_ids, _) =
|
||||
get_transactions_to_process(next_block, cache_l1, inscriptions_db_tx, ctx);
|
||||
|
||||
ctx.try_log(|logger| {
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
"Number of inscriptions in block #{} to pre-process: {}",
|
||||
@@ -254,7 +255,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
}
|
||||
}
|
||||
}
|
||||
ctx.try_log(|logger| {
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
"Inscriptions data computation for block #{} collected", block.block_identifier.index
|
||||
@@ -265,7 +266,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
for tx in tx_thread_pool.iter() {
|
||||
// Empty the queue
|
||||
if let Ok((traversal_result, _prioritary, thread_index)) = traversal_rx.try_recv() {
|
||||
if let Ok(traversal) = traversal_result {
|
||||
if let Ok((traversal, _)) = traversal_result {
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
@@ -289,7 +290,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
let _ = tx.send(None);
|
||||
}
|
||||
|
||||
let ctx_moved = ctx.clone();
|
||||
let ctx_moved = inner_ctx.clone();
|
||||
let _ = hiro_system_kit::thread_named("Garbage collection").spawn(move || {
|
||||
ctx_moved.try_log(|logger| info!(logger, "Cleanup: threadpool deallocation started",));
|
||||
|
||||
@@ -299,7 +300,7 @@ pub fn parallelize_inscription_data_computations(
|
||||
ctx_moved.try_log(|logger| info!(logger, "Cleanup: threadpool deallocation ended",));
|
||||
});
|
||||
|
||||
ctx.try_log(|logger| {
|
||||
inner_ctx.try_log(|logger| {
|
||||
info!(
|
||||
logger,
|
||||
"Inscriptions data computation for block #{} ended", block.block_identifier.index
|
||||
@@ -335,7 +336,7 @@ fn get_transactions_to_process(
|
||||
let mut transactions_ids: Vec<(TransactionIdentifier, usize)> = vec![];
|
||||
let mut l1_cache_hits = vec![];
|
||||
|
||||
let mut known_transactions =
|
||||
let known_transactions =
|
||||
find_all_inscriptions_in_block(&block.block_identifier.index, inscriptions_db_tx, ctx);
|
||||
|
||||
for tx in block.transactions.iter().skip(1) {
|
||||
@@ -356,7 +357,7 @@ fn get_transactions_to_process(
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(_) = known_transactions.remove(&key) {
|
||||
if let Some(_) = known_transactions.get(&key) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -378,8 +379,9 @@ fn get_transactions_to_process(
|
||||
/// use.
|
||||
///
|
||||
pub struct SequenceCursor<'a> {
|
||||
blessed: Option<i64>,
|
||||
cursed: Option<i64>,
|
||||
pos_cursor: Option<i64>,
|
||||
neg_cursor: Option<i64>,
|
||||
jubilee_cursor: Option<i64>,
|
||||
inscriptions_db_conn: &'a Connection,
|
||||
current_block_height: u64,
|
||||
}
|
||||
@@ -387,41 +389,62 @@ pub struct SequenceCursor<'a> {
|
||||
impl<'a> SequenceCursor<'a> {
|
||||
pub fn new(inscriptions_db_conn: &'a Connection) -> SequenceCursor<'a> {
|
||||
SequenceCursor {
|
||||
blessed: None,
|
||||
cursed: None,
|
||||
jubilee_cursor: None,
|
||||
pos_cursor: None,
|
||||
neg_cursor: None,
|
||||
inscriptions_db_conn,
|
||||
current_block_height: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reset(&mut self) {
|
||||
self.blessed = None;
|
||||
self.cursed = None;
|
||||
self.pos_cursor = None;
|
||||
self.neg_cursor = None;
|
||||
self.jubilee_cursor = None;
|
||||
self.current_block_height = 0;
|
||||
}
|
||||
|
||||
pub fn pick_next(&mut self, cursed: bool, block_height: u64, ctx: &Context) -> i64 {
|
||||
pub fn pick_next(
|
||||
&mut self,
|
||||
cursed: bool,
|
||||
block_height: u64,
|
||||
network: &Network,
|
||||
ctx: &Context,
|
||||
) -> OrdinalInscriptionNumber {
|
||||
if block_height < self.current_block_height {
|
||||
self.reset();
|
||||
}
|
||||
self.current_block_height = block_height;
|
||||
|
||||
match cursed {
|
||||
true => self.pick_next_cursed(ctx),
|
||||
false => self.pick_next_blessed(ctx),
|
||||
}
|
||||
let classic = match cursed {
|
||||
true => self.pick_next_neg_number(ctx),
|
||||
false => self.pick_next_pos_number(ctx),
|
||||
};
|
||||
let jubilee_height = match network {
|
||||
Network::Bitcoin => 824544,
|
||||
Network::Regtest => 110,
|
||||
Network::Signet => 175392,
|
||||
Network::Testnet => 2544192,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let jubilee = if block_height >= jubilee_height {
|
||||
self.pick_next_jubilee_number(ctx)
|
||||
} else {
|
||||
classic
|
||||
};
|
||||
OrdinalInscriptionNumber { classic, jubilee }
|
||||
}
|
||||
|
||||
fn pick_next_blessed(&mut self, ctx: &Context) -> i64 {
|
||||
match self.blessed {
|
||||
fn pick_next_pos_number(&mut self, ctx: &Context) -> i64 {
|
||||
match self.pos_cursor {
|
||||
None => {
|
||||
match find_latest_inscription_number_at_block_height(
|
||||
match find_nth_classic_pos_number_at_block_height(
|
||||
&self.current_block_height,
|
||||
&self.inscriptions_db_conn,
|
||||
&ctx,
|
||||
) {
|
||||
Some(inscription_number) => {
|
||||
self.blessed = Some(inscription_number);
|
||||
self.pos_cursor = Some(inscription_number);
|
||||
inscription_number + 1
|
||||
}
|
||||
_ => 0,
|
||||
@@ -431,16 +454,35 @@ impl<'a> SequenceCursor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn pick_next_cursed(&mut self, ctx: &Context) -> i64 {
|
||||
match self.cursed {
|
||||
fn pick_next_jubilee_number(&mut self, ctx: &Context) -> i64 {
|
||||
match self.pos_cursor {
|
||||
None => {
|
||||
match find_latest_cursed_inscription_number_at_block_height(
|
||||
match find_nth_jubilee_number_at_block_height(
|
||||
&self.current_block_height,
|
||||
&self.inscriptions_db_conn,
|
||||
&ctx,
|
||||
) {
|
||||
Some(inscription_number) => {
|
||||
self.cursed = Some(inscription_number);
|
||||
self.jubilee_cursor = Some(inscription_number);
|
||||
inscription_number + 1
|
||||
}
|
||||
_ => 0,
|
||||
}
|
||||
}
|
||||
Some(value) => value + 1,
|
||||
}
|
||||
}
|
||||
|
||||
fn pick_next_neg_number(&mut self, ctx: &Context) -> i64 {
|
||||
match self.neg_cursor {
|
||||
None => {
|
||||
match find_nth_classic_neg_number_at_block_height(
|
||||
&self.current_block_height,
|
||||
&self.inscriptions_db_conn,
|
||||
&ctx,
|
||||
) {
|
||||
Some(inscription_number) => {
|
||||
self.neg_cursor = Some(inscription_number);
|
||||
inscription_number - 1
|
||||
}
|
||||
_ => -1,
|
||||
@@ -450,12 +492,16 @@ impl<'a> SequenceCursor<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn increment_cursed(&mut self, ctx: &Context) {
|
||||
self.cursed = Some(self.pick_next_cursed(ctx));
|
||||
pub fn increment_neg_cursor(&mut self, ctx: &Context) {
|
||||
self.neg_cursor = Some(self.pick_next_neg_number(ctx));
|
||||
}
|
||||
|
||||
pub fn increment_blessed(&mut self, ctx: &Context) {
|
||||
self.blessed = Some(self.pick_next_blessed(ctx))
|
||||
pub fn increment_pos_number(&mut self, ctx: &Context) {
|
||||
self.pos_cursor = Some(self.pick_next_pos_number(ctx))
|
||||
}
|
||||
|
||||
pub fn increment_jubilee_number(&mut self, ctx: &Context) {
|
||||
self.jubilee_cursor = Some(self.pick_next_jubilee_number(ctx))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -549,13 +595,13 @@ pub fn augment_block_with_ordinals_inscriptions_data(
|
||||
};
|
||||
let is_curse = inscription_data.curse_type.is_some();
|
||||
let inscription_number =
|
||||
sequence_cursor.pick_next(is_curse, block.block_identifier.index, &ctx);
|
||||
sequence_cursor.pick_next(is_curse, block.block_identifier.index, &network, &ctx);
|
||||
inscription_data.inscription_number = inscription_number;
|
||||
|
||||
if is_curse {
|
||||
sequence_cursor.increment_cursed(ctx);
|
||||
sequence_cursor.increment_neg_cursor(ctx);
|
||||
} else {
|
||||
sequence_cursor.increment_blessed(ctx);
|
||||
sequence_cursor.increment_pos_number(ctx);
|
||||
};
|
||||
|
||||
ctx.try_log(|logger| {
|
||||
@@ -563,7 +609,7 @@ pub fn augment_block_with_ordinals_inscriptions_data(
|
||||
logger,
|
||||
"Unbound inscription {} (#{}) detected on Satoshi {} (block #{}, {} transfers)",
|
||||
inscription_data.inscription_id,
|
||||
inscription_data.inscription_number,
|
||||
inscription_data.get_inscription_number(),
|
||||
inscription_data.ordinal_number,
|
||||
block.block_identifier.index,
|
||||
inscription_data.transfers_pre_inscription,
|
||||
@@ -591,8 +637,7 @@ fn augment_transaction_with_ordinals_inscriptions_data(
|
||||
ctx: &Context,
|
||||
) -> bool {
|
||||
let any_event = tx.metadata.ordinal_operations.is_empty() == false;
|
||||
let mut ordinals_ops_indexes_to_discard = VecDeque::new();
|
||||
|
||||
let mut inscription_subindex = 0;
|
||||
for (op_index, op) in tx.metadata.ordinal_operations.iter_mut().enumerate() {
|
||||
let (mut is_cursed, inscription) = match op {
|
||||
OrdinalOperation::InscriptionRevealed(inscription) => {
|
||||
@@ -602,26 +647,26 @@ fn augment_transaction_with_ordinals_inscriptions_data(
|
||||
};
|
||||
|
||||
let transaction_identifier = tx.transaction_identifier.clone();
|
||||
let inscription_id = format_inscription_id(&transaction_identifier, inscription_subindex);
|
||||
let traversal = match inscriptions_data
|
||||
.remove(&(transaction_identifier, inscription.inscription_input_index))
|
||||
.get(&(transaction_identifier, inscription.inscription_input_index))
|
||||
{
|
||||
Some(traversal) => traversal,
|
||||
None => {
|
||||
let err_msg = format!(
|
||||
"Unable to retrieve backward traversal result for inscription {}",
|
||||
tx.transaction_identifier.hash
|
||||
);
|
||||
ctx.try_log(|logger| {
|
||||
error!(
|
||||
logger,
|
||||
"Unable to retrieve cached inscription data for inscription {}",
|
||||
tx.transaction_identifier.hash
|
||||
);
|
||||
error!(logger, "{}", err_msg);
|
||||
});
|
||||
ordinals_ops_indexes_to_discard.push_front(op_index);
|
||||
continue;
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
// Do we need to curse the inscription?
|
||||
let mut inscription_number =
|
||||
sequence_cursor.pick_next(is_cursed, block_identifier.index, ctx);
|
||||
sequence_cursor.pick_next(is_cursed, block_identifier.index, network, ctx);
|
||||
let mut curse_type_override = None;
|
||||
if !is_cursed {
|
||||
// Is this inscription re-inscribing an existing blessed inscription?
|
||||
@@ -640,13 +685,14 @@ fn augment_transaction_with_ordinals_inscriptions_data(
|
||||
|
||||
is_cursed = true;
|
||||
inscription_number =
|
||||
sequence_cursor.pick_next(is_cursed, block_identifier.index, ctx);
|
||||
sequence_cursor.pick_next(is_cursed, block_identifier.index, network, ctx);
|
||||
curse_type_override = Some(OrdinalInscriptionCurseType::Reinscription)
|
||||
}
|
||||
};
|
||||
|
||||
let outputs = &tx.metadata.outputs;
|
||||
inscription.inscription_id = inscription_id;
|
||||
inscription.inscription_number = inscription_number;
|
||||
let outputs = &tx.metadata.outputs;
|
||||
inscription.ordinal_offset = traversal.get_ordinal_coinbase_offset();
|
||||
inscription.ordinal_block_height = traversal.get_ordinal_coinbase_height();
|
||||
inscription.ordinal_number = traversal.ordinal_number;
|
||||
@@ -666,7 +712,7 @@ fn augment_transaction_with_ordinals_inscriptions_data(
|
||||
inscription.inscription_output_value = output.value;
|
||||
inscription.inscriber_address = {
|
||||
let script_pub_key = output.get_script_pubkey_hex();
|
||||
match Script::from_hex(&script_pub_key) {
|
||||
match ScriptBuf::from_hex(&script_pub_key) {
|
||||
Ok(script) => match Address::from_script(&script, network.clone()) {
|
||||
Ok(a) => Some(a.to_string()),
|
||||
_ => None,
|
||||
@@ -703,7 +749,7 @@ fn augment_transaction_with_ordinals_inscriptions_data(
|
||||
logger,
|
||||
"Inscription {} (#{}) detected on Satoshi {} (block #{}, {} transfers)",
|
||||
inscription.inscription_id,
|
||||
inscription.inscription_number,
|
||||
inscription.get_inscription_number(),
|
||||
inscription.ordinal_number,
|
||||
block_identifier.index,
|
||||
inscription.transfers_pre_inscription,
|
||||
@@ -711,10 +757,11 @@ fn augment_transaction_with_ordinals_inscriptions_data(
|
||||
});
|
||||
|
||||
if is_cursed {
|
||||
sequence_cursor.increment_cursed(ctx);
|
||||
sequence_cursor.increment_neg_cursor(ctx);
|
||||
} else {
|
||||
sequence_cursor.increment_blessed(ctx);
|
||||
sequence_cursor.increment_pos_number(ctx);
|
||||
}
|
||||
inscription_subindex += 1;
|
||||
}
|
||||
any_event
|
||||
}
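The loop above leans on a sequence cursor that hands out the next number for each of the sequences tracked after Jubilee: positive classic numbers for blessed inscriptions, negative classic numbers for cursed ones, and an always-increasing jubilee number. A minimal sketch of that bookkeeping under a simplified, assumed shape (the real SequenceCursor in this crate also takes the block height, network and logging context, consults sequence_metadata in SQLite, and switches behavior at the Jubilee activation height, so treat this as illustrative only):

// Hypothetical, simplified counter; not the crate's actual SequenceCursor.
struct SequenceCursor {
    pos: i64,     // next classic number for blessed inscriptions
    neg: i64,     // next classic number for cursed inscriptions
    jubilee: i64, // jubilee number, always increasing
}

impl SequenceCursor {
    // Returns the (classic, jubilee) pair the next inscription would receive.
    fn pick_next(&self, is_cursed: bool) -> (i64, i64) {
        let classic = if is_cursed { self.neg - 1 } else { self.pos + 1 };
        (classic, self.jubilee + 1)
    }
    fn increment_pos_number(&mut self) {
        self.pos += 1;
        self.jubilee += 1;
    }
    fn increment_neg_cursor(&mut self) {
        self.neg -= 1;
        self.jubilee += 1;
    }
}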
|
||||
@@ -735,7 +782,7 @@ fn consolidate_transaction_with_pre_computed_inscription_data(
|
||||
OrdinalOperation::InscriptionTransferred(_) => continue,
|
||||
};
|
||||
|
||||
let Some(traversal) = inscriptions_data.remove(&(
|
||||
let Some(traversal) = inscriptions_data.get(&(
|
||||
tx.transaction_identifier.clone(),
|
||||
inscription.inscription_input_index,
|
||||
)) else {
|
||||
@@ -745,7 +792,7 @@ fn consolidate_transaction_with_pre_computed_inscription_data(
|
||||
inscription.ordinal_offset = traversal.get_ordinal_coinbase_offset();
|
||||
inscription.ordinal_block_height = traversal.get_ordinal_coinbase_height();
|
||||
inscription.ordinal_number = traversal.ordinal_number;
|
||||
inscription.inscription_number = traversal.inscription_number;
|
||||
inscription.inscription_number = traversal.inscription_number.clone();
|
||||
inscription.transfers_pre_inscription = traversal.transfers;
|
||||
inscription.inscription_fee = tx.metadata.fee;
|
||||
inscription.tx_index = tx_index;
|
||||
@@ -755,8 +802,8 @@ fn consolidate_transaction_with_pre_computed_inscription_data(
|
||||
traversal.transfer_data.inscription_offset_intra_output,
|
||||
);
|
||||
|
||||
if inscription.inscription_number < 0 {
|
||||
inscription.curse_type = Some(OrdinalInscriptionCurseType::Unknown);
|
||||
if inscription.inscription_number.classic < 0 {
|
||||
inscription.curse_type = Some(OrdinalInscriptionCurseType::Generic);
|
||||
}
|
||||
|
||||
if traversal
|
||||
@@ -775,7 +822,7 @@ fn consolidate_transaction_with_pre_computed_inscription_data(
|
||||
inscription.inscription_output_value = output.value;
|
||||
inscription.inscriber_address = {
|
||||
let script_pub_key = output.get_script_pubkey_hex();
|
||||
match Script::from_hex(&script_pub_key) {
|
||||
match ScriptBuf::from_hex(&script_pub_key) {
|
||||
Ok(script) => match Address::from_script(&script, network.clone()) {
|
||||
Ok(a) => Some(a.to_string()),
|
||||
_ => None,
|
||||
@@ -809,16 +856,18 @@ pub fn consolidate_block_with_pre_computed_ordinals_data(
|
||||
let mut inscriptions_data = loop {
|
||||
let results =
|
||||
find_all_inscriptions_in_block(&block.block_identifier.index, inscriptions_db_tx, ctx);
|
||||
if results.len() == expected_inscriptions_count {
|
||||
break results;
|
||||
// TODO: investigate, sporadically the set returned is empty, and requires a retry.
|
||||
if results.is_empty() && expected_inscriptions_count > 0 {
|
||||
ctx.try_log(|logger| {
|
||||
warn!(
|
||||
logger,
|
||||
"Database retuning {} results instead of the expected {expected_inscriptions_count}",
|
||||
results.len()
|
||||
);
|
||||
});
|
||||
continue;
|
||||
}
|
||||
ctx.try_log(|logger| {
|
||||
warn!(
|
||||
logger,
|
||||
"Database retuning {} results instead of the expected {expected_inscriptions_count}",
|
||||
results.len()
|
||||
);
|
||||
});
|
||||
break results;
|
||||
};
|
||||
for (tx_index, tx) in block.transactions.iter_mut().enumerate() {
|
||||
// Add inscriptions data
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use chainhook_sdk::{
|
||||
bitcoincore_rpc_json::bitcoin::{hashes::hex::FromHex, Address, Network, Script},
|
||||
bitcoincore_rpc_json::bitcoin::{Address, Network, ScriptBuf},
|
||||
types::{
|
||||
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
|
||||
OrdinalInscriptionTransferData, OrdinalInscriptionTransferDestination, OrdinalOperation,
|
||||
@@ -117,7 +117,7 @@ pub fn augment_transaction_with_ordinals_transfers_data(
|
||||
format_outpoint_to_watch(&tx.transaction_identifier, output_index);
|
||||
let script_pub_key_hex =
|
||||
tx.metadata.outputs[output_index].get_script_pubkey_hex();
|
||||
let updated_address = match Script::from_hex(&script_pub_key_hex) {
|
||||
let updated_address = match ScriptBuf::from_hex(&script_pub_key_hex) {
|
||||
Ok(script) => match Address::from_script(&script, network.clone()) {
|
||||
Ok(address) => OrdinalInscriptionTransferDestination::Transferred(
|
||||
address.to_string(),
|
||||
|
||||
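Both files in this diff repeat the same pattern for turning an output's script pubkey into an address with the newer ScriptBuf API. A standalone sketch of that pattern, reusing the import path shown above (the helper name is illustrative, not part of the diff):

use chainhook_sdk::bitcoincore_rpc_json::bitcoin::{Address, Network, ScriptBuf};

// Decode a script pubkey hex string and derive the receiving address, if any.
fn address_from_script_hex(script_pub_key_hex: &str, network: Network) -> Option<String> {
    match ScriptBuf::from_hex(script_pub_key_hex) {
        Ok(script) => match Address::from_script(&script, network) {
            Ok(address) => Some(address.to_string()),
            Err(_) => None,
        },
        Err(_) => None,
    }
}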
@@ -1,4 +1,4 @@
|
||||
use chainhook_sdk::types::{BlockIdentifier, TransactionIdentifier};
|
||||
use chainhook_sdk::types::{BlockIdentifier, OrdinalInscriptionNumber, TransactionIdentifier};
|
||||
use chainhook_sdk::utils::Context;
|
||||
use dashmap::DashMap;
|
||||
use fxhash::FxHasher;
|
||||
@@ -7,10 +7,11 @@ use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::db::{
|
||||
find_lazy_block_at_block_height, open_ordhook_db_conn_rocks_db_loop, TransferData,
|
||||
find_pinned_block_bytes_at_block_height, open_ordhook_db_conn_rocks_db_loop, BlockBytesCursor,
|
||||
TransferData,
|
||||
};
|
||||
|
||||
use crate::db::{LazyBlockTransaction, TraversalResult};
|
||||
use crate::db::{TransactionBytesCursor, TraversalResult};
|
||||
use crate::ord::height::Height;
|
||||
|
||||
pub fn compute_satoshi_number(
|
||||
@@ -18,19 +19,19 @@ pub fn compute_satoshi_number(
|
||||
block_identifier: &BlockIdentifier,
|
||||
transaction_identifier: &TransactionIdentifier,
|
||||
inscription_input_index: usize,
|
||||
inscription_number: i64,
|
||||
traversals_cache: &Arc<
|
||||
DashMap<(u32, [u8; 8]), LazyBlockTransaction, BuildHasherDefault<FxHasher>>,
|
||||
DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>>,
|
||||
>,
|
||||
_back_tracking: bool,
|
||||
ctx: &Context,
|
||||
) -> Result<TraversalResult, String> {
|
||||
) -> Result<(TraversalResult, Vec<(u32, [u8; 8])>), String> {
|
||||
let mut inscription_offset_intra_output = 0;
|
||||
let mut inscription_output_index: usize = 0;
|
||||
let mut ordinal_offset = 0;
|
||||
let mut ordinal_block_number = block_identifier.index as u32;
|
||||
let txid = transaction_identifier.get_8_hash_bytes();
|
||||
|
||||
let mut blocks_db = open_ordhook_db_conn_rocks_db_loop(false, &blocks_db_dir, &ctx);
|
||||
let mut back_track = vec![];
|
||||
let blocks_db = open_ordhook_db_conn_rocks_db_loop(false, &blocks_db_dir, &ctx);
|
||||
|
||||
let (sats_ranges, inscription_offset_cross_outputs) = match traversals_cache
|
||||
.get(&(block_identifier.index as u32, txid.clone()))
|
||||
@@ -42,38 +43,28 @@ pub fn compute_satoshi_number(
|
||||
tx.get_cumulated_sats_in_until_input_index(inscription_input_index),
|
||||
)
|
||||
}
|
||||
None => {
|
||||
let mut attempt = 0;
|
||||
loop {
|
||||
match find_lazy_block_at_block_height(
|
||||
ordinal_block_number,
|
||||
3,
|
||||
false,
|
||||
&blocks_db,
|
||||
&ctx,
|
||||
) {
|
||||
None => {
|
||||
if attempt < 3 {
|
||||
attempt += 1;
|
||||
blocks_db =
|
||||
open_ordhook_db_conn_rocks_db_loop(false, &blocks_db_dir, &ctx);
|
||||
} else {
|
||||
return Err(format!("block #{ordinal_block_number} not in database"));
|
||||
}
|
||||
}
|
||||
Some(block) => match block.find_and_serialize_transaction_with_txid(&txid) {
|
||||
None => loop {
|
||||
match find_pinned_block_bytes_at_block_height(ordinal_block_number, 3, &blocks_db, &ctx)
|
||||
{
|
||||
None => {
|
||||
return Err(format!("block #{ordinal_block_number} not in database"));
|
||||
}
|
||||
Some(block_bytes) => {
|
||||
let cursor = BlockBytesCursor::new(&block_bytes.as_ref());
|
||||
match cursor.find_and_serialize_transaction_with_txid(&txid) {
|
||||
Some(tx) => {
|
||||
let sats_ranges = tx.get_sat_ranges();
|
||||
let inscription_offset_cross_outputs =
|
||||
tx.get_cumulated_sats_in_until_input_index(inscription_input_index);
|
||||
traversals_cache.insert((ordinal_block_number, txid.clone()), tx);
|
||||
back_track.push((ordinal_block_number, txid.clone()));
|
||||
break (sats_ranges, inscription_offset_cross_outputs);
|
||||
}
|
||||
None => return Err(format!("txid not in block #{ordinal_block_number}")),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
for (i, (min, max)) in sats_ranges.into_iter().enumerate() {
|
||||
@@ -142,51 +133,56 @@ pub fn compute_satoshi_number(
|
||||
transaction_identifier.hash
|
||||
)
|
||||
});
|
||||
return Ok(TraversalResult {
|
||||
inscription_number: 0,
|
||||
ordinal_number: 0,
|
||||
transfers: 0,
|
||||
inscription_input_index,
|
||||
transaction_identifier_inscription: transaction_identifier.clone(),
|
||||
transfer_data: TransferData {
|
||||
inscription_offset_intra_output,
|
||||
transaction_identifier_location: transaction_identifier.clone(),
|
||||
output_index: inscription_output_index,
|
||||
tx_index: 0,
|
||||
return Ok((
|
||||
TraversalResult {
|
||||
inscription_number: OrdinalInscriptionNumber::zero(),
|
||||
ordinal_number: 0,
|
||||
transfers: 0,
|
||||
inscription_input_index,
|
||||
transaction_identifier_inscription: transaction_identifier.clone(),
|
||||
transfer_data: TransferData {
|
||||
inscription_offset_intra_output,
|
||||
transaction_identifier_location: transaction_identifier.clone(),
|
||||
output_index: inscription_output_index,
|
||||
tx_index: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
back_track,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let lazy_block = {
|
||||
let mut attempt = 0;
|
||||
let pinned_block_bytes = {
|
||||
loop {
|
||||
match find_lazy_block_at_block_height(
|
||||
match find_pinned_block_bytes_at_block_height(
|
||||
ordinal_block_number,
|
||||
3,
|
||||
false,
|
||||
&blocks_db,
|
||||
&ctx,
|
||||
) {
|
||||
Some(block) => break block,
|
||||
None => {
|
||||
if attempt < 3 {
|
||||
attempt += 1;
|
||||
blocks_db =
|
||||
open_ordhook_db_conn_rocks_db_loop(false, &blocks_db_dir, &ctx);
|
||||
} else {
|
||||
return Err(format!("block #{ordinal_block_number} not in database"));
|
||||
}
|
||||
return Err(format!("block #{ordinal_block_number} not in database (traversing {} / {} in progress)", transaction_identifier.hash, block_identifier.index));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let coinbase_txid = lazy_block.get_coinbase_txid();
|
||||
let block_cursor = BlockBytesCursor::new(pinned_block_bytes.as_ref());
|
||||
let txid = tx_cursor.0;
|
||||
let mut block_cursor_tx_iter = block_cursor.iter_tx();
|
||||
let coinbase = block_cursor_tx_iter.next().expect("empty block");
|
||||
|
||||
// evaluate exit condition: did we reach the **final** coinbase transaction
|
||||
if coinbase_txid.eq(&txid) {
|
||||
if coinbase.txid.eq(&txid) {
|
||||
let mut intra_coinbase_output_offset = 0;
|
||||
for (index, output_value) in coinbase.outputs.iter().enumerate() {
|
||||
if index == tx_cursor.1 {
|
||||
break;
|
||||
}
|
||||
intra_coinbase_output_offset += output_value;
|
||||
}
|
||||
ordinal_offset += intra_coinbase_output_offset;
|
||||
|
||||
let subsidy = Height(ordinal_block_number.into()).subsidy();
|
||||
if ordinal_offset < subsidy {
|
||||
// Great!
|
||||
@@ -196,7 +192,7 @@ pub fn compute_satoshi_number(
|
||||
// loop over the transaction fees to detect the right range
|
||||
let mut accumulated_fees = subsidy;
|
||||
|
||||
for tx in lazy_block.iter_tx() {
|
||||
for tx in block_cursor_tx_iter {
|
||||
let mut total_in = 0;
|
||||
for input in tx.inputs.iter() {
|
||||
total_in += input.txin_value;
|
||||
@@ -232,7 +228,7 @@ pub fn compute_satoshi_number(
|
||||
}
|
||||
} else {
|
||||
// isolate the target transaction
|
||||
let lazy_tx = match lazy_block.find_and_serialize_transaction_with_txid(&txid) {
|
||||
let lazy_tx = match block_cursor.find_and_serialize_transaction_with_txid(&txid) {
|
||||
Some(entry) => entry,
|
||||
None => {
|
||||
ctx.try_log(|logger| {
|
||||
@@ -261,6 +257,7 @@ pub fn compute_satoshi_number(
|
||||
sats_in += input.txin_value;
|
||||
|
||||
if sats_out < sats_in {
|
||||
back_track.push((ordinal_block_number, tx_cursor.0.clone()));
|
||||
traversals_cache.insert((ordinal_block_number, tx_cursor.0), lazy_tx.clone());
|
||||
ordinal_offset = sats_out - (sats_in - input.txin_value);
|
||||
ordinal_block_number = input.block_height;
|
||||
@@ -277,19 +274,22 @@ pub fn compute_satoshi_number(
|
||||
transaction_identifier.hash
|
||||
)
|
||||
});
|
||||
return Ok(TraversalResult {
|
||||
inscription_number: 0,
|
||||
ordinal_number: 0,
|
||||
transfers: 0,
|
||||
inscription_input_index,
|
||||
transaction_identifier_inscription: transaction_identifier.clone(),
|
||||
transfer_data: TransferData {
|
||||
inscription_offset_intra_output,
|
||||
transaction_identifier_location: transaction_identifier.clone(),
|
||||
output_index: inscription_output_index,
|
||||
tx_index: 0,
|
||||
return Ok((
|
||||
TraversalResult {
|
||||
inscription_number: OrdinalInscriptionNumber::zero(),
|
||||
ordinal_number: 0,
|
||||
transfers: 0,
|
||||
inscription_input_index,
|
||||
transaction_identifier_inscription: transaction_identifier.clone(),
|
||||
transfer_data: TransferData {
|
||||
inscription_offset_intra_output,
|
||||
transaction_identifier_location: transaction_identifier.clone(),
|
||||
output_index: inscription_output_index,
|
||||
tx_index: 0,
|
||||
},
|
||||
},
|
||||
});
|
||||
back_track,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -297,17 +297,20 @@ pub fn compute_satoshi_number(
|
||||
let height = Height(ordinal_block_number.into());
|
||||
let ordinal_number = height.starting_sat().0 + ordinal_offset + inscription_offset_intra_output;
|
||||
|
||||
Ok(TraversalResult {
|
||||
inscription_number,
|
||||
ordinal_number,
|
||||
transfers: hops,
|
||||
inscription_input_index,
|
||||
transaction_identifier_inscription: transaction_identifier.clone(),
|
||||
transfer_data: TransferData {
|
||||
inscription_offset_intra_output,
|
||||
transaction_identifier_location: transaction_identifier.clone(),
|
||||
output_index: inscription_output_index,
|
||||
tx_index: 0,
|
||||
Ok((
|
||||
TraversalResult {
|
||||
inscription_number: OrdinalInscriptionNumber::zero(),
|
||||
ordinal_number,
|
||||
transfers: hops,
|
||||
inscription_input_index,
|
||||
transaction_identifier_inscription: transaction_identifier.clone(),
|
||||
transfer_data: TransferData {
|
||||
inscription_offset_intra_output,
|
||||
transaction_identifier_location: transaction_identifier.clone(),
|
||||
output_index: inscription_output_index,
|
||||
tx_index: 0,
|
||||
},
|
||||
},
|
||||
})
|
||||
back_track,
|
||||
))
|
||||
}
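compute_satoshi_number now returns the visited (block height, truncated txid) pairs alongside the traversal result, so the caller can prune exactly those entries from the shared traversals cache once the result has been stored. A hedged sketch of that caller-side cleanup (the cache type is copied from the signature above; the eviction step itself is an assumption about how the extra tuple is meant to be consumed):

use std::hash::BuildHasherDefault;
use std::sync::Arc;

use dashmap::DashMap;
use fxhash::FxHasher;

use crate::db::TransactionBytesCursor;

type TraversalsCache =
    Arc<DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>>>;

// Drop the cache entries that were only inserted to support one traversal.
fn evict_back_track(cache: &TraversalsCache, back_track: Vec<(u32, [u8; 8])>) {
    for key in back_track {
        cache.remove(&key);
    }
}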
|
||||
|
||||
33372
components/ordhook-core/src/db/fixtures/blocks_json/279671.json
Normal file
File diff suppressed because one or more lines are too long
@@ -8,14 +8,14 @@ use std::{
|
||||
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use rocksdb::DB;
|
||||
use rocksdb::{DBPinnableSlice, DB};
|
||||
use rusqlite::{Connection, OpenFlags, ToSql, Transaction};
|
||||
use std::io::Cursor;
|
||||
|
||||
use chainhook_sdk::{
|
||||
indexer::bitcoin::BitcoinBlockFullBreakdown,
|
||||
types::{
|
||||
BitcoinBlockData, BlockIdentifier, OrdinalInscriptionRevealData,
|
||||
BitcoinBlockData, BlockIdentifier, OrdinalInscriptionNumber, OrdinalInscriptionRevealData,
|
||||
OrdinalInscriptionTransferData, TransactionIdentifier,
|
||||
},
|
||||
utils::Context,
|
||||
@@ -59,9 +59,11 @@ pub fn initialize_ordhook_db(base_dir: &PathBuf, ctx: &Context) -> Connection {
|
||||
if let Err(e) = conn.execute(
|
||||
"CREATE TABLE IF NOT EXISTS inscriptions (
|
||||
inscription_id TEXT NOT NULL PRIMARY KEY,
|
||||
input_index INTEGER NOT NULL,
|
||||
block_height INTEGER NOT NULL,
|
||||
ordinal_number INTEGER NOT NULL,
|
||||
inscription_number INTEGER NOT NULL
|
||||
jubilee_inscription_number INTEGER NOT NULL,
|
||||
classic_inscription_number INTEGER NOT NULL
|
||||
)",
|
||||
[],
|
||||
) {
|
||||
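With inscription_number split into jubilee_inscription_number and classic_inscription_number, lookups can target either numbering scheme, and each column is indexed in the hunk that follows. A hypothetical lookup against this schema, not part of the diff, written in the same rusqlite style as the rest of the file:

use rusqlite::Connection;

// Hypothetical helper: resolve an inscription id from its classic number.
fn find_inscription_id_by_classic_number(
    conn: &Connection,
    classic_number: i64,
) -> rusqlite::Result<Option<String>> {
    let mut stmt = conn.prepare(
        "SELECT inscription_id FROM inscriptions WHERE classic_inscription_number = ? LIMIT 1",
    )?;
    let mut rows = stmt.query(rusqlite::params![classic_number])?;
    Ok(match rows.next()? {
        Some(row) => Some(row.get(0)?),
        None => None,
    })
}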
@@ -80,7 +82,14 @@ pub fn initialize_ordhook_db(base_dir: &PathBuf, ctx: &Context) -> Connection {
|
||||
ctx.try_log(|logger| warn!(logger, "unable to query hord.sqlite: {}", e.to_string()));
|
||||
}
|
||||
if let Err(e) = conn.execute(
|
||||
"CREATE INDEX IF NOT EXISTS index_inscriptions_on_inscription_number ON inscriptions(inscription_number);",
|
||||
"CREATE INDEX IF NOT EXISTS index_inscriptions_on_jubilee_inscription_number ON inscriptions(jubilee_inscription_number);",
|
||||
[],
|
||||
) {
|
||||
ctx.try_log(|logger| warn!(logger, "unable to query hord.sqlite: {}", e.to_string()));
|
||||
}
|
||||
|
||||
if let Err(e) = conn.execute(
|
||||
"CREATE INDEX IF NOT EXISTS index_inscriptions_on_classic_inscription_number ON inscriptions(classic_inscription_number);",
|
||||
[],
|
||||
) {
|
||||
ctx.try_log(|logger| warn!(logger, "unable to query hord.sqlite: {}", e.to_string()));
|
||||
@@ -134,8 +143,9 @@ pub fn initialize_ordhook_db(base_dir: &PathBuf, ctx: &Context) -> Connection {
|
||||
if let Err(e) = conn.execute(
|
||||
"CREATE TABLE IF NOT EXISTS sequence_metadata (
|
||||
block_height INTEGER NOT NULL,
|
||||
latest_cursed_inscription_number INTEGER NOT NULL,
|
||||
latest_inscription_number INTEGER NOT NULL
|
||||
nth_classic_pos_number INTEGER NOT NULL,
|
||||
nth_classic_neg_number INTEGER NOT NULL,
|
||||
nth_jubilee_number INTEGER NOT NULL
|
||||
)",
|
||||
[],
|
||||
) {
|
||||
@@ -240,11 +250,6 @@ fn get_default_ordhook_db_file_path_rocks_db(base_dir: &PathBuf) -> PathBuf {
|
||||
|
||||
fn rocks_db_default_options() -> rocksdb::Options {
|
||||
let mut opts = rocksdb::Options::default();
|
||||
opts.create_if_missing(true);
|
||||
// opts.prepare_for_bulk_load();
|
||||
// opts.set_compression_type(rocksdb::DBCompressionType::Lz4);
|
||||
// opts.set_blob_compression_type(rocksdb::DBCompressionType::Lz4);
|
||||
// opts.increase_parallelism(parallelism)
|
||||
// Per rocksdb's documentation:
|
||||
// If cache_index_and_filter_blocks is false (which is default),
|
||||
// the number of index/filter blocks is controlled by option max_open_files.
|
||||
@@ -252,7 +257,23 @@ fn rocks_db_default_options() -> rocksdb::Options {
|
||||
// we recommend setting max_open_files to -1, which means infinity.
|
||||
// This option will preload all filter and index blocks and will not need to maintain LRU of files.
|
||||
// Setting max_open_files to -1 will get you the best possible performance.
|
||||
opts.set_max_open_files(4096);
|
||||
// Additional documentation:
|
||||
// https://betterprogramming.pub/navigating-the-minefield-of-rocksdb-configuration-options-246af1e1d3f9
|
||||
// opts.set_write_buffer_size(64 * 1024 * 1024);
|
||||
// opts.set_blob_file_size(1 * 1024 * 1024 * 1024);
|
||||
// opts.set_target_file_size_base(64 * 1024 * 1024);
|
||||
opts.set_max_open_files(2048);
|
||||
opts.create_if_missing(true);
|
||||
// opts.optimize_for_point_lookup(1 * 1024 * 1024 * 1024);
|
||||
// opts.set_level_zero_stop_writes_trigger(64);
|
||||
// opts.set_level_zero_slowdown_writes_trigger(20);
|
||||
// opts.set_enable_blob_files(true);
|
||||
// opts.set_enable_blob_gc(true);
|
||||
// opts.set_use_fsync(false);
|
||||
// opts.set_bytes_per_sync(8388608);
|
||||
// opts.set_compaction_style(DBCompactionStyle::Universal);
|
||||
// opts.set_compression_type(rocksdb::DBCompressionType::Lz4);
|
||||
// opts.set_blob_compression_type(rocksdb::DBCompressionType::Lz4);
|
||||
opts
|
||||
}
|
||||
|
||||
@@ -287,8 +308,11 @@ pub fn open_ordhook_db_conn_rocks_db_loop(
|
||||
retries += 1;
|
||||
if retries > 10 {
|
||||
ctx.try_log(|logger| {
|
||||
warn!(logger, "Unable to open db: {e}",);
|
||||
warn!(logger, "Unable to open db: {e}. Retrying in 10s",);
|
||||
});
|
||||
sleep(Duration::from_secs(10));
|
||||
} else {
|
||||
sleep(Duration::from_secs(2));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@@ -319,7 +343,7 @@ fn open_readwrite_ordhook_db_conn_rocks_db(
|
||||
|
||||
pub fn insert_entry_in_blocks(
|
||||
block_height: u32,
|
||||
lazy_block: &LazyBlock,
|
||||
block_bytes: &[u8],
|
||||
update_tip: bool,
|
||||
blocks_db_rw: &DB,
|
||||
ctx: &Context,
|
||||
@@ -327,7 +351,7 @@ pub fn insert_entry_in_blocks(
|
||||
let block_height_bytes = block_height.to_be_bytes();
|
||||
let mut retries = 0;
|
||||
loop {
|
||||
let res = blocks_db_rw.put(&block_height_bytes, &lazy_block.bytes);
|
||||
let res = blocks_db_rw.put(&block_height_bytes, block_bytes);
|
||||
match res {
|
||||
Ok(_) => break,
|
||||
Err(e) => {
|
||||
@@ -360,13 +384,12 @@ pub fn find_last_block_inserted(blocks_db: &DB) -> u32 {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_lazy_block_at_block_height(
|
||||
pub fn find_pinned_block_bytes_at_block_height<'a>(
|
||||
block_height: u32,
|
||||
retry: u8,
|
||||
try_iterator: bool,
|
||||
blocks_db: &DB,
|
||||
blocks_db: &'a DB,
|
||||
ctx: &Context,
|
||||
) -> Option<LazyBlock> {
|
||||
) -> Option<DBPinnableSlice<'a>> {
|
||||
let mut attempt = 1;
|
||||
// let mut read_options = rocksdb::ReadOptions::default();
|
||||
// read_options.fill_cache(true);
|
||||
@@ -375,24 +398,9 @@ pub fn find_lazy_block_at_block_height(
|
||||
let mut rng = thread_rng();
|
||||
|
||||
loop {
|
||||
match blocks_db.get(block_height.to_be_bytes()) {
|
||||
Ok(Some(res)) => return Some(LazyBlock::new(res)),
|
||||
match blocks_db.get_pinned(block_height.to_be_bytes()) {
|
||||
Ok(Some(res)) => return Some(res),
|
||||
_ => {
|
||||
if attempt == 1 && try_iterator {
|
||||
ctx.try_log(|logger| {
|
||||
warn!(
|
||||
logger,
|
||||
"Attempt to retrieve block #{} through iterator", block_height,
|
||||
)
|
||||
});
|
||||
let mut iter = blocks_db.iterator(rocksdb::IteratorMode::End);
|
||||
let block_height_bytes = block_height.to_be_bytes();
|
||||
while let Some(Ok((k, res))) = iter.next() {
|
||||
if (*k).eq(&block_height_bytes) {
|
||||
return Some(LazyBlock::new(res.to_vec()));
|
||||
}
|
||||
}
|
||||
}
|
||||
attempt += 1;
|
||||
backoff = 2.0 * backoff + (backoff * rng.gen_range(0.0..1.0));
|
||||
let duration = std::time::Duration::from_millis((backoff * 1_000.0) as u64);
|
||||
@@ -411,6 +419,56 @@ pub fn find_lazy_block_at_block_height(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_block_bytes_at_block_height<'a>(
|
||||
block_height: u32,
|
||||
retry: u8,
|
||||
blocks_db: &DB,
|
||||
ctx: &Context,
|
||||
) -> Option<Vec<u8>> {
|
||||
let mut attempt = 1;
|
||||
// let mut read_options = rocksdb::ReadOptions::default();
|
||||
// read_options.fill_cache(true);
|
||||
// read_options.set_verify_checksums(false);
|
||||
let mut backoff: f64 = 1.0;
|
||||
let mut rng = thread_rng();
|
||||
|
||||
loop {
|
||||
match blocks_db.get(block_height.to_be_bytes()) {
|
||||
Ok(Some(res)) => return Some(res),
|
||||
_ => {
|
||||
attempt += 1;
|
||||
backoff = 2.0 * backoff + (backoff * rng.gen_range(0.0..1.0));
|
||||
let duration = std::time::Duration::from_millis((backoff * 1_000.0) as u64);
|
||||
ctx.try_log(|logger| {
|
||||
warn!(
|
||||
logger,
|
||||
"Unable to find block #{}, will retry in {:?}", block_height, duration
|
||||
)
|
||||
});
|
||||
std::thread::sleep(duration);
|
||||
if attempt > retry {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
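Both block lookup helpers above retry with the same schedule: the delay roughly doubles on every miss and picks up a random jitter, backoff_next = 2 * backoff + backoff * U(0, 1), starting from one second. A self-contained restatement of just that arithmetic (the function name is illustrative):

use rand::{thread_rng, Rng};

// Successive delays used between lookup attempts: each miss multiplies the
// previous delay by a random factor between 2.0 and 3.0.
fn backoff_schedule(attempts: usize) -> Vec<std::time::Duration> {
    let mut rng = thread_rng();
    let mut backoff: f64 = 1.0;
    let mut delays = Vec::with_capacity(attempts);
    for _ in 0..attempts {
        backoff = 2.0 * backoff + (backoff * rng.gen_range(0.0..1.0));
        delays.push(std::time::Duration::from_millis((backoff * 1_000.0) as u64));
    }
    delays
}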
|
||||
|
||||
pub fn run_compaction(blocks_db_rw: &DB, lim: u32) {
|
||||
let gen = 0u32.to_be_bytes();
|
||||
let _ = blocks_db_rw.compact_range(Some(&gen), Some(&lim.to_be_bytes()));
|
||||
}
|
||||
|
||||
pub fn find_missing_blocks(blocks_db: &DB, start: u32, end: u32, ctx: &Context) -> Vec<u32> {
|
||||
let mut missing_blocks = vec![];
|
||||
for i in start..=end {
|
||||
if find_pinned_block_bytes_at_block_height(i as u32, 0, &blocks_db, ctx).is_none() {
|
||||
missing_blocks.push(i);
|
||||
}
|
||||
}
|
||||
missing_blocks
|
||||
}
|
||||
|
||||
pub fn remove_entry_from_blocks(block_height: u32, blocks_db_rw: &DB, ctx: &Context) {
|
||||
if let Err(e) = blocks_db_rw.delete(block_height.to_be_bytes()) {
|
||||
ctx.try_log(|logger| error!(logger, "{}", e.to_string()));
|
||||
@@ -439,8 +497,8 @@ pub fn insert_entry_in_inscriptions(
|
||||
ctx: &Context,
|
||||
) {
|
||||
while let Err(e) = inscriptions_db_conn_rw.execute(
|
||||
"INSERT INTO inscriptions (inscription_id, ordinal_number, inscription_number, block_height) VALUES (?1, ?2, ?3, ?4)",
|
||||
rusqlite::params![&inscription_data.inscription_id, &inscription_data.ordinal_number, &inscription_data.inscription_number, &block_identifier.index],
|
||||
"INSERT INTO inscriptions (inscription_id, ordinal_number, jubilee_inscription_number, classic_inscription_number, block_height, input_index) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
|
||||
rusqlite::params![&inscription_data.inscription_id, &inscription_data.ordinal_number, &inscription_data.inscription_number.jubilee, &inscription_data.inscription_number.classic, &block_identifier.index, &inscription_data.inscription_input_index],
|
||||
) {
|
||||
ctx.try_log(|logger| warn!(logger, "unable to query hord.sqlite: {}", e.to_string()));
|
||||
std::thread::sleep(std::time::Duration::from_secs(1));
|
||||
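The insert above binds both halves of the inscription number separately. From the .classic and .jubilee field accesses in this diff and the zero() constructor used by the traversal code, the chainhook_sdk type presumably looks roughly like the sketch below; the exact definition is an assumption:

// Assumed shape, inferred from the usages in this diff; the canonical
// definition lives in chainhook_sdk::types.
#[derive(Clone, Debug, Default)]
pub struct OrdinalInscriptionNumber {
    pub classic: i64, // classic numbering: negative for cursed inscriptions
    pub jubilee: i64, // jubilee numbering: keeps increasing for cursed inscriptions too
}

impl OrdinalInscriptionNumber {
    pub fn zero() -> Self {
        Self::default()
    }
}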
@@ -506,25 +564,34 @@ pub fn update_sequence_metadata_with_block(
|
||||
inscriptions_db_conn_rw: &Connection,
|
||||
ctx: &Context,
|
||||
) {
|
||||
let mut latest_blessed = find_latest_inscription_number_at_block_height(
|
||||
let mut nth_classic_pos_number = find_nth_classic_pos_number_at_block_height(
|
||||
&block.block_identifier.index,
|
||||
inscriptions_db_conn_rw,
|
||||
ctx,
|
||||
)
|
||||
.unwrap_or(0);
|
||||
let mut latest_cursed = find_latest_cursed_inscription_number_at_block_height(
|
||||
let mut nth_classic_neg_number = find_nth_classic_neg_number_at_block_height(
|
||||
&block.block_identifier.index,
|
||||
inscriptions_db_conn_rw,
|
||||
ctx,
|
||||
)
|
||||
.unwrap_or(0);
|
||||
let mut nth_jubilee_number = find_nth_jubilee_number_at_block_height(
|
||||
&block.block_identifier.index,
|
||||
inscriptions_db_conn_rw,
|
||||
ctx,
|
||||
)
|
||||
.unwrap_or(0);
|
||||
for inscription_data in get_inscriptions_revealed_in_block(&block).iter() {
|
||||
latest_blessed = latest_blessed.max(inscription_data.inscription_number);
|
||||
latest_cursed = latest_cursed.min(inscription_data.inscription_number);
|
||||
nth_classic_pos_number =
|
||||
nth_classic_pos_number.max(inscription_data.inscription_number.classic);
|
||||
nth_classic_neg_number =
|
||||
nth_classic_neg_number.min(inscription_data.inscription_number.classic);
|
||||
nth_jubilee_number = nth_jubilee_number.max(inscription_data.inscription_number.jubilee);
|
||||
}
|
||||
while let Err(e) = inscriptions_db_conn_rw.execute(
|
||||
"INSERT INTO sequence_metadata (block_height, latest_inscription_number, latest_cursed_inscription_number) VALUES (?1, ?2, ?3)",
|
||||
rusqlite::params![&block.block_identifier.index, latest_blessed, latest_cursed],
|
||||
"INSERT INTO sequence_metadata (block_height, nth_classic_pos_number, nth_classic_neg_number, nth_jubilee_number) VALUES (?1, ?2, ?3, ?4)",
|
||||
rusqlite::params![&block.block_identifier.index, nth_classic_pos_number, nth_classic_neg_number, nth_jubilee_number],
|
||||
) {
|
||||
ctx.try_log(|logger| warn!(logger, "unable to update sequence_metadata: {}", e.to_string()));
|
||||
std::thread::sleep(std::time::Duration::from_secs(1));
|
||||
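The counters written above are running extremes over the block's revealed numbers, seeded from the previous block's sequence_metadata row. A minimal standalone restatement for the two classic counters (names and values illustrative):

// Fold a block's revealed classic inscription numbers into the running
// counters, mirroring the loop above.
fn fold_classic_counters(prior_pos: i64, prior_neg: i64, revealed: &[i64]) -> (i64, i64) {
    let mut nth_classic_pos_number = prior_pos;
    let mut nth_classic_neg_number = prior_neg;
    for n in revealed {
        nth_classic_pos_number = nth_classic_pos_number.max(*n);
        nth_classic_neg_number = nth_classic_neg_number.min(*n);
    }
    (nth_classic_pos_number, nth_classic_neg_number)
}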
@@ -862,37 +929,49 @@ pub fn find_all_inscription_transfers(
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_latest_inscription_number_at_block_height(
|
||||
pub fn find_nth_classic_pos_number_at_block_height(
|
||||
block_height: &u64,
|
||||
db_conn: &Connection,
|
||||
ctx: &Context,
|
||||
) -> Option<i64> {
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
let query = "SELECT latest_inscription_number FROM sequence_metadata WHERE block_height < ? ORDER BY block_height DESC LIMIT 1";
|
||||
let query = "SELECT nth_classic_pos_number FROM sequence_metadata WHERE block_height < ? ORDER BY block_height DESC LIMIT 1";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
inscription_number
|
||||
})
|
||||
.or_else(|| compute_latest_inscription_number_at_block_height(block_height, db_conn, ctx))
|
||||
.or_else(|| compute_nth_classic_pos_number_at_block_height(block_height, db_conn, ctx))
|
||||
}
|
||||
|
||||
pub fn find_latest_cursed_inscription_number_at_block_height(
|
||||
pub fn find_nth_classic_neg_number_at_block_height(
|
||||
block_height: &u64,
|
||||
db_conn: &Connection,
|
||||
ctx: &Context,
|
||||
) -> Option<i64> {
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
let query = "SELECT latest_cursed_inscription_number FROM sequence_metadata WHERE block_height < ? ORDER BY block_height DESC LIMIT 1";
|
||||
let query = "SELECT nth_classic_neg_number FROM sequence_metadata WHERE block_height < ? ORDER BY block_height DESC LIMIT 1";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
inscription_number
|
||||
})
|
||||
.or_else(|| {
|
||||
compute_latest_cursed_inscription_number_at_block_height(block_height, db_conn, ctx)
|
||||
})
|
||||
.or_else(|| compute_nth_classic_neg_number_at_block_height(block_height, db_conn, ctx))
|
||||
}
|
||||
|
||||
pub fn compute_latest_inscription_number_at_block_height(
|
||||
pub fn find_nth_jubilee_number_at_block_height(
|
||||
block_height: &u64,
|
||||
db_conn: &Connection,
|
||||
ctx: &Context,
|
||||
) -> Option<i64> {
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
let query = "SELECT nth_jubilee_number FROM sequence_metadata WHERE block_height < ? ORDER BY block_height DESC LIMIT 1";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
inscription_number
|
||||
})
|
||||
.or_else(|| compute_nth_jubilee_number_at_block_height(block_height, db_conn, ctx))
|
||||
}
|
||||
|
||||
pub fn compute_nth_jubilee_number_at_block_height(
|
||||
block_height: &u64,
|
||||
db_conn: &Connection,
|
||||
ctx: &Context,
|
||||
@@ -904,14 +983,14 @@ pub fn compute_latest_inscription_number_at_block_height(
|
||||
)
|
||||
});
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
let query = "SELECT inscription_number FROM inscriptions WHERE block_height < ? ORDER BY inscription_number DESC LIMIT 1";
|
||||
let query = "SELECT jubilee_inscription_number FROM inscriptions WHERE block_height < ? ORDER BY jubilee_inscription_number DESC LIMIT 1";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
inscription_number
|
||||
})
|
||||
}
|
||||
|
||||
pub fn compute_latest_cursed_inscription_number_at_block_height(
|
||||
pub fn compute_nth_classic_pos_number_at_block_height(
|
||||
block_height: &u64,
|
||||
db_conn: &Connection,
|
||||
ctx: &Context,
|
||||
@@ -919,11 +998,30 @@ pub fn compute_latest_cursed_inscription_number_at_block_height(
|
||||
ctx.try_log(|logger| {
|
||||
warn!(
|
||||
logger,
|
||||
"Start computing latest_cursed_inscription_number at block height: {block_height}"
|
||||
"Start computing latest_inscription_number at block height: {block_height}"
|
||||
)
|
||||
});
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
let query = "SELECT inscription_number FROM inscriptions WHERE block_height < ? ORDER BY inscription_number ASC LIMIT 1";
|
||||
let query = "SELECT classic_inscription_number FROM inscriptions WHERE block_height < ? ORDER BY classic_inscription_number DESC LIMIT 1";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
inscription_number
|
||||
})
|
||||
}
|
||||
|
||||
pub fn compute_nth_classic_neg_number_at_block_height(
|
||||
block_height: &u64,
|
||||
db_conn: &Connection,
|
||||
ctx: &Context,
|
||||
) -> Option<i64> {
|
||||
ctx.try_log(|logger| {
|
||||
warn!(
|
||||
logger,
|
||||
"Start computing nth_classic_neg_number at block height: {block_height}"
|
||||
)
|
||||
});
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
let query = "SELECT classic_inscription_number FROM inscriptions WHERE block_height < ? ORDER BY classic_inscription_number ASC LIMIT 1";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
inscription_number
|
||||
@@ -936,7 +1034,7 @@ pub fn find_blessed_inscription_with_ordinal_number(
|
||||
ctx: &Context,
|
||||
) -> Option<String> {
|
||||
let args: &[&dyn ToSql] = &[&ordinal_number.to_sql().unwrap()];
|
||||
let query = "SELECT inscription_id FROM inscriptions WHERE ordinal_number = ? AND inscription_number >= 0";
|
||||
let query = "SELECT inscription_id FROM inscriptions WHERE ordinal_number = ? AND classic_inscription_number >= 0";
|
||||
perform_query_one(query, args, db_conn, ctx, |row| {
|
||||
let inscription_id: String = row.get(0).unwrap();
|
||||
inscription_id
|
||||
@@ -953,13 +1051,16 @@ pub fn find_inscription_with_id(
|
||||
return Err(format!("unable to retrieve location for {inscription_id}"));
|
||||
};
|
||||
let args: &[&dyn ToSql] = &[&inscription_id.to_sql().unwrap()];
|
||||
let query = "SELECT inscription_number, ordinal_number, block_height FROM inscriptions WHERE inscription_id = ?";
|
||||
let query = "SELECT classic_inscription_number, jubilee_inscription_number, ordinal_number, block_height, input_index FROM inscriptions WHERE inscription_id = ?";
|
||||
let entry = perform_query_one(query, args, db_conn, ctx, move |row| {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
let ordinal_number: u64 = row.get(1).unwrap();
|
||||
let block_height: u64 = row.get(2).unwrap();
|
||||
let (transaction_identifier_inscription, inscription_input_index) =
|
||||
parse_inscription_id(inscription_id);
|
||||
let inscription_number = OrdinalInscriptionNumber {
|
||||
classic: row.get(0).unwrap(),
|
||||
jubilee: row.get(1).unwrap(),
|
||||
};
|
||||
let ordinal_number: u64 = row.get(2).unwrap();
|
||||
let block_height: u64 = row.get(3).unwrap();
|
||||
let inscription_input_index: usize = row.get(4).unwrap();
|
||||
let (transaction_identifier_inscription, _) = parse_inscription_id(inscription_id);
|
||||
(
|
||||
inscription_number,
|
||||
ordinal_number,
|
||||
@@ -1001,7 +1102,7 @@ pub fn find_all_inscriptions_in_block(
|
||||
let args: &[&dyn ToSql] = &[&block_height.to_sql().unwrap()];
|
||||
|
||||
let mut stmt = loop {
|
||||
match inscriptions_db_tx.prepare("SELECT inscription_number, ordinal_number, inscription_id FROM inscriptions where block_height = ? ORDER BY inscription_number ASC")
|
||||
match inscriptions_db_tx.prepare("SELECT classic_inscription_number, jubilee_inscription_number, ordinal_number, inscription_id, input_index FROM inscriptions where block_height = ?")
|
||||
{
|
||||
Ok(stmt) => break stmt,
|
||||
Err(e) => {
|
||||
@@ -1028,10 +1129,14 @@ pub fn find_all_inscriptions_in_block(
|
||||
loop {
|
||||
match rows.next() {
|
||||
Ok(Some(row)) => {
|
||||
let inscription_number: i64 = row.get(0).unwrap();
|
||||
let ordinal_number: u64 = row.get(1).unwrap();
|
||||
let inscription_id: String = row.get(2).unwrap();
|
||||
let (transaction_identifier_inscription, inscription_input_index) =
|
||||
let inscription_number = OrdinalInscriptionNumber {
|
||||
classic: row.get(0).unwrap(),
|
||||
jubilee: row.get(1).unwrap(),
|
||||
};
|
||||
let ordinal_number: u64 = row.get(2).unwrap();
|
||||
let inscription_id: String = row.get(3).unwrap();
|
||||
let inscription_input_index: usize = row.get(4).unwrap();
|
||||
let (transaction_identifier_inscription, _) =
|
||||
{ parse_inscription_id(&inscription_id) };
|
||||
let Some(transfer_data) = transfers_data
|
||||
.get(&inscription_id)
|
||||
@@ -1077,13 +1182,6 @@ pub struct WatchedSatpoint {
|
||||
pub offset: u64,
|
||||
}
|
||||
|
||||
impl WatchedSatpoint {
|
||||
pub fn get_genesis_satpoint(&self) -> String {
|
||||
let (transaction_id, input) = parse_inscription_id(&self.inscription_id);
|
||||
format!("{}:{}", transaction_id.hash, input)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_watched_satpoint_for_inscription(
|
||||
inscription_id: &str,
|
||||
db_conn: &Connection,
|
||||
@@ -1237,7 +1335,7 @@ pub fn delete_data_in_ordhook_db(
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TraversalResult {
|
||||
pub inscription_number: i64,
|
||||
pub inscription_number: OrdinalInscriptionNumber,
|
||||
pub inscription_input_index: usize,
|
||||
pub transaction_identifier_inscription: TransactionIdentifier,
|
||||
pub ordinal_number: u64,
|
||||
@@ -1278,6 +1376,17 @@ pub fn format_satpoint_to_watch(
|
||||
)
|
||||
}
|
||||
|
||||
pub fn format_inscription_id(
|
||||
transaction_identifier: &TransactionIdentifier,
|
||||
inscription_subindex: usize,
|
||||
) -> String {
|
||||
format!(
|
||||
"{}i{}",
|
||||
transaction_identifier.get_hash_bytes_str(),
|
||||
inscription_subindex,
|
||||
)
|
||||
}
|
||||
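format_inscription_id above produces ids of the usual <txid>i<index> form. The crate's parse_inscription_id (used elsewhere in this diff) performs the reverse; the standalone sketch below only illustrates the convention and is not the crate's implementation:

// Illustrative parser for ids of the form "<txid>i<index>".
fn split_inscription_id(inscription_id: &str) -> Option<(&str, usize)> {
    let (txid, index) = inscription_id.rsplit_once('i')?;
    Some((txid, index.parse().ok()?))
}

// e.g. split_inscription_id("<64-hex-char-txid>i0") -> Some(("<64-hex-char-txid>", 0))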
|
||||
pub fn parse_satpoint_to_watch(outpoint_to_watch: &str) -> (TransactionIdentifier, usize, u64) {
|
||||
let comps: Vec<&str> = outpoint_to_watch.split(":").collect();
|
||||
let tx = TransactionIdentifier::new(comps[0]);
|
||||
@@ -1324,21 +1433,21 @@ pub fn parse_outpoint_to_watch(outpoint_to_watch: &str) -> (TransactionIdentifie
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LazyBlock {
|
||||
pub bytes: Vec<u8>,
|
||||
pub struct BlockBytesCursor<'a> {
|
||||
pub bytes: &'a [u8],
|
||||
pub tx_len: u16,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LazyBlockTransaction {
|
||||
pub struct TransactionBytesCursor {
|
||||
pub txid: [u8; 8],
|
||||
pub inputs: Vec<LazyBlockTransactionInput>,
|
||||
pub inputs: Vec<TransactionInputBytesCursor>,
|
||||
pub outputs: Vec<u64>,
|
||||
}
|
||||
|
||||
impl LazyBlockTransaction {
|
||||
impl TransactionBytesCursor {
|
||||
pub fn get_average_bytes_size() -> usize {
|
||||
TXID_LEN + 3 * LazyBlockTransactionInput::get_average_bytes_size() + 3 * SATS_LEN
|
||||
TXID_LEN + 3 * TransactionInputBytesCursor::get_average_bytes_size() + 3 * SATS_LEN
|
||||
}
|
||||
|
||||
pub fn get_sat_ranges(&self) -> Vec<(u64, u64)> {
|
||||
@@ -1364,14 +1473,14 @@ impl LazyBlockTransaction {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LazyBlockTransactionInput {
|
||||
pub struct TransactionInputBytesCursor {
|
||||
pub txin: [u8; 8],
|
||||
pub block_height: u32,
|
||||
pub vout: u16,
|
||||
pub txin_value: u64,
|
||||
}
|
||||
|
||||
impl LazyBlockTransactionInput {
|
||||
impl TransactionInputBytesCursor {
|
||||
pub fn get_average_bytes_size() -> usize {
|
||||
TXID_LEN + SATS_LEN + 4 + 2
|
||||
}
|
||||
@@ -1382,16 +1491,20 @@ const SATS_LEN: usize = 8;
|
||||
const INPUT_SIZE: usize = TXID_LEN + 4 + 2 + SATS_LEN;
|
||||
const OUTPUT_SIZE: usize = 8;
|
||||
|
||||
impl LazyBlock {
|
||||
pub fn new(bytes: Vec<u8>) -> LazyBlock {
|
||||
impl<'a> BlockBytesCursor<'a> {
|
||||
pub fn new(bytes: &[u8]) -> BlockBytesCursor {
|
||||
let tx_len = u16::from_be_bytes([bytes[0], bytes[1]]);
|
||||
LazyBlock { bytes, tx_len }
|
||||
BlockBytesCursor { bytes, tx_len }
|
||||
}
|
||||
|
||||
pub fn get_coinbase_data_pos(&self) -> usize {
|
||||
(2 + self.tx_len * 2 * 2) as usize
|
||||
}
|
||||
|
||||
pub fn get_coinbase_outputs_len(&self) -> usize {
|
||||
u16::from_be_bytes([self.bytes[4], self.bytes[5]]) as usize
|
||||
}
|
||||
|
||||
pub fn get_u64_at_pos(&self, pos: usize) -> u64 {
|
||||
u64::from_be_bytes([
|
||||
self.bytes[pos],
|
||||
@@ -1410,13 +1523,8 @@ impl LazyBlock {
|
||||
&self.bytes[pos..pos + TXID_LEN]
|
||||
}
|
||||
|
||||
pub fn get_coinbase_sats(&self) -> u64 {
|
||||
let pos = self.get_coinbase_data_pos() + TXID_LEN;
|
||||
self.get_u64_at_pos(pos)
|
||||
}
|
||||
|
||||
pub fn get_transactions_data_pos(&self) -> usize {
|
||||
self.get_coinbase_data_pos() + TXID_LEN + SATS_LEN
|
||||
self.get_coinbase_data_pos()
|
||||
}
|
||||
|
||||
pub fn get_transaction_format(&self, index: u16) -> (u16, u16, usize) {
|
||||
@@ -1431,13 +1539,13 @@ impl LazyBlock {
|
||||
(inputs, outputs, size)
|
||||
}
|
||||
|
||||
pub fn get_lazy_transaction_at_pos(
|
||||
pub fn get_transaction_bytes_cursor_at_pos(
|
||||
&self,
|
||||
cursor: &mut Cursor<&Vec<u8>>,
|
||||
cursor: &mut Cursor<&[u8]>,
|
||||
txid: [u8; 8],
|
||||
inputs_len: u16,
|
||||
outputs_len: u16,
|
||||
) -> LazyBlockTransaction {
|
||||
) -> TransactionBytesCursor {
|
||||
let mut inputs = Vec::with_capacity(inputs_len as usize);
|
||||
for _ in 0..inputs_len {
|
||||
let mut txin = [0u8; 8];
|
||||
@@ -1450,7 +1558,7 @@ impl LazyBlock {
|
||||
cursor.read_exact(&mut vout).expect("data corrupted");
|
||||
let mut txin_value = [0u8; 8];
|
||||
cursor.read_exact(&mut txin_value).expect("data corrupted");
|
||||
inputs.push(LazyBlockTransactionInput {
|
||||
inputs.push(TransactionInputBytesCursor {
|
||||
txin: txin,
|
||||
block_height: u32::from_be_bytes(block_height),
|
||||
vout: u16::from_be_bytes(vout),
|
||||
@@ -1463,7 +1571,7 @@ impl LazyBlock {
|
||||
cursor.read_exact(&mut value).expect("data corrupted");
|
||||
outputs.push(u64::from_be_bytes(value))
|
||||
}
|
||||
LazyBlockTransaction {
|
||||
TransactionBytesCursor {
|
||||
txid,
|
||||
inputs,
|
||||
outputs,
|
||||
@@ -1473,10 +1581,10 @@ impl LazyBlock {
|
||||
pub fn find_and_serialize_transaction_with_txid(
|
||||
&self,
|
||||
searched_txid: &[u8],
|
||||
) -> Option<LazyBlockTransaction> {
|
||||
) -> Option<TransactionBytesCursor> {
|
||||
// println!("{:?}", hex::encode(searched_txid));
|
||||
let mut entry = None;
|
||||
let mut cursor = Cursor::new(&self.bytes);
|
||||
let mut cursor = Cursor::new(self.bytes);
|
||||
let mut cumulated_offset = 0;
|
||||
let mut i = 0;
|
||||
while entry.is_none() {
|
||||
@@ -1488,7 +1596,7 @@ impl LazyBlock {
|
||||
let _ = cursor.read_exact(&mut txid);
|
||||
// println!("-> {}", hex::encode(txid));
|
||||
if searched_txid.eq(&txid) {
|
||||
entry = Some(self.get_lazy_transaction_at_pos(
|
||||
entry = Some(self.get_transaction_bytes_cursor_at_pos(
|
||||
&mut cursor,
|
||||
txid,
|
||||
inputs_len,
|
||||
@@ -1505,19 +1613,19 @@ impl LazyBlock {
|
||||
entry
|
||||
}
|
||||
|
||||
pub fn iter_tx(&self) -> LazyBlockTransactionIterator {
|
||||
LazyBlockTransactionIterator::new(&self)
|
||||
pub fn iter_tx(&self) -> TransactionBytesCursorIterator {
|
||||
TransactionBytesCursorIterator::new(&self)
|
||||
}
|
||||
|
||||
pub fn from_full_block(block: &BitcoinBlockFullBreakdown) -> std::io::Result<LazyBlock> {
|
||||
pub fn from_full_block<'b>(block: &BitcoinBlockFullBreakdown) -> std::io::Result<Vec<u8>> {
|
||||
let mut buffer = vec![];
|
||||
// Number of transactions in the block (not including coinbase)
|
||||
let tx_len = block.tx.len() as u16 - 1;
|
||||
let tx_len = block.tx.len() as u16;
|
||||
buffer.write(&tx_len.to_be_bytes())?;
|
||||
// For each transaction:
|
||||
let u16_max = u16::MAX as usize;
|
||||
for tx in block.tx.iter().skip(1) {
|
||||
let inputs_len = if tx.vin.len() > u16_max {
|
||||
for (i, tx) in block.tx.iter().enumerate() {
|
||||
let mut inputs_len = if tx.vin.len() > u16_max {
|
||||
0
|
||||
} else {
|
||||
tx.vin.len() as u16
|
||||
@@ -1527,27 +1635,16 @@ impl LazyBlock {
|
||||
} else {
|
||||
tx.vout.len() as u16
|
||||
};
|
||||
if i == 0 {
|
||||
inputs_len = 0;
|
||||
}
|
||||
// Number of inputs
|
||||
buffer.write(&inputs_len.to_be_bytes())?;
|
||||
// Number of outputs
|
||||
buffer.write(&outputs_len.to_be_bytes())?;
|
||||
}
|
||||
// Coinbase transaction txid - 8 first bytes
|
||||
let coinbase_txid = {
|
||||
let txid = hex::decode(block.tx[0].txid.to_string()).unwrap();
|
||||
[
|
||||
txid[0], txid[1], txid[2], txid[3], txid[4], txid[5], txid[6], txid[7],
|
||||
]
|
||||
};
|
||||
buffer.write_all(&coinbase_txid)?;
|
||||
// Coinbase transaction value
|
||||
let mut coinbase_value = 0;
|
||||
for coinbase_output in block.tx[0].vout.iter() {
|
||||
coinbase_value += coinbase_output.value.to_sat();
|
||||
}
|
||||
buffer.write(&coinbase_value.to_be_bytes())?;
|
||||
// For each transaction:
|
||||
for tx in block.tx.iter().skip(1) {
|
||||
for tx in block.tx.iter() {
|
||||
// txid - 8 first bytes
|
||||
let txid = {
|
||||
let txid = hex::decode(tx.txid.to_string()).unwrap();
|
||||
@@ -1572,8 +1669,11 @@ impl LazyBlock {
|
||||
for i in 0..inputs_len {
|
||||
let input = &tx.vin[i];
|
||||
// txin - 8 first bytes
|
||||
let Some(input_txid) = input.txid.as_ref() else {
|
||||
continue;
|
||||
};
|
||||
let txin = {
|
||||
let txid = hex::decode(input.txid.as_ref().unwrap().to_string()).unwrap();
|
||||
let txid = hex::decode(input_txid).unwrap();
|
||||
[
|
||||
txid[0], txid[1], txid[2], txid[3], txid[4], txid[5], txid[6], txid[7],
|
||||
]
|
||||
@@ -1596,53 +1696,48 @@ impl LazyBlock {
|
||||
buffer.write(&sats.to_be_bytes())?;
|
||||
}
|
||||
}
|
||||
Ok(Self::new(buffer))
|
||||
Ok(buffer)
|
||||
}
|
||||
|
||||
pub fn from_standardized_block(block: &BitcoinBlockData) -> std::io::Result<LazyBlock> {
|
||||
pub fn from_standardized_block<'b>(block: &BitcoinBlockData) -> std::io::Result<Vec<u8>> {
|
||||
let mut buffer = vec![];
|
||||
// Number of transactions in the block (not including coinbase)
|
||||
let tx_len = block.transactions.len() as u16 - 1;
|
||||
let tx_len = block.transactions.len() as u16;
|
||||
buffer.write(&tx_len.to_be_bytes())?;
|
||||
// For each transaction:
|
||||
for tx in block.transactions.iter().skip(1) {
|
||||
let inputs_len = tx.metadata.inputs.len() as u16;
|
||||
for (i, tx) in block.transactions.iter().enumerate() {
|
||||
let inputs_len = if i > 0 {
|
||||
tx.metadata.inputs.len() as u16
|
||||
} else {
|
||||
0
|
||||
};
|
||||
let outputs_len = tx.metadata.outputs.len() as u16;
|
||||
// Number of inputs
|
||||
buffer.write(&inputs_len.to_be_bytes())?;
|
||||
// Number of outputs
|
||||
buffer.write(&outputs_len.to_be_bytes())?;
|
||||
}
|
||||
// Coinbase transaction txid - 8 first bytes
|
||||
let coinbase_txid = block.transactions[0]
|
||||
.transaction_identifier
|
||||
.get_8_hash_bytes();
|
||||
buffer.write_all(&coinbase_txid)?;
|
||||
// Coinbase transaction value
|
||||
let mut coinbase_value = 0;
|
||||
for coinbase_output in block.transactions[0].metadata.outputs.iter() {
|
||||
coinbase_value += coinbase_output.value;
|
||||
}
|
||||
buffer.write_all(&coinbase_value.to_be_bytes())?;
|
||||
// For each transaction:
|
||||
for tx in block.transactions.iter().skip(1) {
|
||||
for (i, tx) in block.transactions.iter().enumerate() {
|
||||
// txid - 8 first bytes
|
||||
let txid = tx.transaction_identifier.get_8_hash_bytes();
|
||||
buffer.write_all(&txid)?;
|
||||
// For each transaction input:
|
||||
for input in tx.metadata.inputs.iter() {
|
||||
// txin - 8 first bytes
|
||||
let txin = input.previous_output.txid.get_8_hash_bytes();
|
||||
buffer.write_all(&txin)?;
|
||||
// txin's block height
|
||||
let block_height = input.previous_output.block_height as u32;
|
||||
buffer.write(&block_height.to_be_bytes())?;
|
||||
// txin's vout index
|
||||
let vout = input.previous_output.vout as u16;
|
||||
buffer.write(&vout.to_be_bytes())?;
|
||||
// txin's sats value
|
||||
let sats = input.previous_output.value;
|
||||
buffer.write(&sats.to_be_bytes())?;
|
||||
// For each non coinbase transaction input:
|
||||
if i > 0 {
|
||||
for input in tx.metadata.inputs.iter() {
|
||||
// txin - 8 first bytes
|
||||
let txin = input.previous_output.txid.get_8_hash_bytes();
|
||||
buffer.write_all(&txin)?;
|
||||
// txin's block height
|
||||
let block_height = input.previous_output.block_height as u32;
|
||||
buffer.write(&block_height.to_be_bytes())?;
|
||||
// txin's vout index
|
||||
let vout = input.previous_output.vout as u16;
|
||||
buffer.write(&vout.to_be_bytes())?;
|
||||
// txin's sats value
|
||||
let sats = input.previous_output.value;
|
||||
buffer.write(&sats.to_be_bytes())?;
|
||||
}
|
||||
}
|
||||
// For each transaction output:
|
||||
for output in tx.metadata.outputs.iter() {
|
||||
@@ -1650,43 +1745,45 @@ impl LazyBlock {
|
||||
buffer.write(&sats.to_be_bytes())?;
|
||||
}
|
||||
}
|
||||
Ok(Self::new(buffer))
|
||||
Ok(buffer)
|
||||
}
|
||||
}
|
||||
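from_full_block and from_standardized_block both write the compact layout that BlockBytesCursor later walks: a 2-byte big-endian transaction count, one (inputs_len, outputs_len) u16 pair per transaction, then for each transaction its 8-byte truncated txid followed by 22-byte inputs and 8-byte output values. A hedged sketch of the offset arithmetic, mirroring the constants defined earlier in this file:

// Mirrors TXID_LEN, SATS_LEN, INPUT_SIZE and OUTPUT_SIZE from this file.
const TXID_LEN: usize = 8;
const SATS_LEN: usize = 8;
const INPUT_SIZE: usize = TXID_LEN + 4 + 2 + SATS_LEN; // txin + block height + vout + value
const OUTPUT_SIZE: usize = 8;

// Offset of the first transaction body: the 2-byte tx count followed by one
// (inputs_len, outputs_len) u16 pair per transaction.
fn transactions_data_pos(tx_len: u16) -> usize {
    2 + tx_len as usize * 2 * 2
}

// Serialized size of a single transaction body.
fn transaction_size(inputs_len: u16, outputs_len: u16) -> usize {
    TXID_LEN + inputs_len as usize * INPUT_SIZE + outputs_len as usize * OUTPUT_SIZE
}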
|
||||
pub struct LazyBlockTransactionIterator<'a> {
|
||||
lazy_block: &'a LazyBlock,
|
||||
pub struct TransactionBytesCursorIterator<'a> {
|
||||
block_bytes_cursor: &'a BlockBytesCursor<'a>,
|
||||
tx_index: u16,
|
||||
cumulated_offset: usize,
|
||||
}
|
||||
|
||||
impl<'a> LazyBlockTransactionIterator<'a> {
|
||||
pub fn new(lazy_block: &'a LazyBlock) -> LazyBlockTransactionIterator<'a> {
|
||||
LazyBlockTransactionIterator {
|
||||
lazy_block,
|
||||
impl<'a> TransactionBytesCursorIterator<'a> {
|
||||
pub fn new(block_bytes_cursor: &'a BlockBytesCursor) -> TransactionBytesCursorIterator<'a> {
|
||||
TransactionBytesCursorIterator {
|
||||
block_bytes_cursor,
|
||||
tx_index: 0,
|
||||
cumulated_offset: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for LazyBlockTransactionIterator<'a> {
|
||||
type Item = LazyBlockTransaction;
|
||||
impl<'a> Iterator for TransactionBytesCursorIterator<'a> {
|
||||
type Item = TransactionBytesCursor;
|
||||
|
||||
fn next(&mut self) -> Option<LazyBlockTransaction> {
|
||||
if self.tx_index >= self.lazy_block.tx_len {
|
||||
fn next(&mut self) -> Option<TransactionBytesCursor> {
|
||||
if self.tx_index >= self.block_bytes_cursor.tx_len {
|
||||
return None;
|
||||
}
|
||||
let pos = self.lazy_block.get_transactions_data_pos() + self.cumulated_offset;
|
||||
let (inputs_len, outputs_len, size) = self.lazy_block.get_transaction_format(self.tx_index);
|
||||
let pos = self.block_bytes_cursor.get_transactions_data_pos() + self.cumulated_offset;
|
||||
let (inputs_len, outputs_len, size) = self
|
||||
.block_bytes_cursor
|
||||
.get_transaction_format(self.tx_index);
|
||||
// println!("{inputs_len} / {outputs_len} / {size}");
|
||||
let mut cursor = Cursor::new(&self.lazy_block.bytes);
|
||||
let mut cursor = Cursor::new(self.block_bytes_cursor.bytes);
|
||||
cursor.set_position(pos as u64);
|
||||
let mut txid = [0u8; 8];
|
||||
let _ = cursor.read_exact(&mut txid);
|
||||
self.cumulated_offset += size;
|
||||
self.tx_index += 1;
|
||||
Some(self.lazy_block.get_lazy_transaction_at_pos(
|
||||
Some(self.block_bytes_cursor.get_transaction_bytes_cursor_at_pos(
|
||||
&mut cursor,
|
||||
txid,
|
||||
inputs_len,
|
||||
@@ -1694,3 +1791,103 @@ impl<'a> Iterator for LazyBlockTransactionIterator<'a> {
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chainhook_sdk::{
|
||||
indexer::bitcoin::{parse_downloaded_block, standardize_bitcoin_block},
|
||||
types::BitcoinNetwork,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_block_cursor_roundtrip() {
|
||||
let ctx = Context::empty();
|
||||
let block = include_str!("./fixtures/blocks_json/279671.json");
|
||||
let decoded_block =
|
||||
parse_downloaded_block(block.as_bytes().to_vec()).expect("unable to decode block");
|
||||
let standardized_block =
|
||||
standardize_bitcoin_block(decoded_block.clone(), &BitcoinNetwork::Mainnet, &ctx)
|
||||
.expect("unable to standardize block");
|
||||
|
||||
for (index, (tx_in, tx_out)) in decoded_block
|
||||
.tx
|
||||
.iter()
|
||||
.zip(standardized_block.transactions.iter())
|
||||
.enumerate()
|
||||
{
|
||||
// Test outputs
|
||||
assert_eq!(tx_in.vout.len(), tx_out.metadata.outputs.len());
|
||||
for (output, src) in tx_out.metadata.outputs.iter().zip(tx_in.vout.iter()) {
|
||||
assert_eq!(output.value, src.value.to_sat());
|
||||
}
|
||||
// Test inputs (non-coinbase transactions only)
|
||||
if index == 0 {
|
||||
continue;
|
||||
}
|
||||
assert_eq!(tx_in.vin.len(), tx_out.metadata.inputs.len());
|
||||
for (input, src) in tx_out.metadata.inputs.iter().zip(tx_in.vin.iter()) {
|
||||
assert_eq!(
|
||||
input.previous_output.block_height,
|
||||
src.prevout.as_ref().unwrap().height
|
||||
);
|
||||
assert_eq!(
|
||||
input.previous_output.value,
|
||||
src.prevout.as_ref().unwrap().value.to_sat()
|
||||
);
|
||||
let txin = hex::decode(src.txid.as_ref().unwrap()).unwrap();
|
||||
assert_eq!(input.previous_output.txid.get_hash_bytes(), txin);
|
||||
assert_eq!(input.previous_output.vout, src.vout.unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
let bytes = BlockBytesCursor::from_full_block(&decoded_block).expect("unable to serialize");
|
||||
let bytes_via_standardized = BlockBytesCursor::from_standardized_block(&standardized_block)
|
||||
.expect("unable to serialize");
|
||||
assert_eq!(bytes, bytes_via_standardized);
|
||||
|
||||
let block_bytes_cursor = BlockBytesCursor::new(&bytes);
|
||||
assert_eq!(decoded_block.tx.len(), block_bytes_cursor.tx_len as usize);
|
||||
|
||||
// Test helpers
|
||||
let coinbase_txid = block_bytes_cursor.get_coinbase_txid();
|
||||
assert_eq!(
|
||||
coinbase_txid,
|
||||
standardized_block.transactions[0]
|
||||
.transaction_identifier
|
||||
.get_8_hash_bytes()
|
||||
);
|
||||
|
||||
// Test transactions
|
||||
for (index, (tx_in, tx_out)) in decoded_block
|
||||
.tx
|
||||
.iter()
|
||||
.zip(block_bytes_cursor.iter_tx())
|
||||
.enumerate()
|
||||
{
|
||||
// Test outputs
|
||||
assert_eq!(tx_in.vout.len(), tx_out.outputs.len());
|
||||
for (sats, src) in tx_out.outputs.iter().zip(tx_in.vout.iter()) {
|
||||
assert_eq!(*sats, src.value.to_sat());
|
||||
}
|
||||
// Test inputs (non-coinbase transactions only)
|
||||
if index == 0 {
|
||||
continue;
|
||||
}
|
||||
assert_eq!(tx_in.vin.len(), tx_out.inputs.len());
|
||||
for (tx_bytes_cursor, src) in tx_out.inputs.iter().zip(tx_in.vin.iter()) {
|
||||
assert_eq!(
|
||||
tx_bytes_cursor.block_height as u64,
|
||||
src.prevout.as_ref().unwrap().height
|
||||
);
|
||||
assert_eq!(
|
||||
tx_bytes_cursor.txin_value,
|
||||
src.prevout.as_ref().unwrap().value.to_sat()
|
||||
);
|
||||
let txin = hex::decode(src.txid.as_ref().unwrap()).unwrap();
|
||||
assert_eq!(tx_bytes_cursor.txin, txin[0..tx_bytes_cursor.txin.len()]);
|
||||
assert_eq!(tx_bytes_cursor.vout as u32, src.vout.unwrap());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,10 +65,7 @@ impl Chain {
|
||||
bitcoin::blockdata::constants::genesis_block(self.network())
|
||||
}
|
||||
|
||||
pub fn address_from_script(
|
||||
self,
|
||||
script: &Script,
|
||||
) -> Result<Address, bitcoin::util::address::Error> {
|
||||
pub fn address_from_script(self, script: &Script) -> Result<Address, bitcoin::address::Error> {
|
||||
Address::from_script(script, self.network())
|
||||
}
|
||||
|
||||
|
||||
1033
components/ordhook-core/src/ord/envelope.rs
Normal file
File diff suppressed because it is too large
@@ -1,6 +1,4 @@
|
||||
use chainhook_sdk::bitcoincore_rpc::bitcoin::blockdata::constants::COIN_VALUE;
|
||||
|
||||
use super::{height::Height, sat::Sat, SUBSIDY_HALVING_INTERVAL};
|
||||
use super::{height::Height, sat::Sat, COIN_VALUE, SUBSIDY_HALVING_INTERVAL};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Debug, PartialOrd)]
|
||||
pub(crate) struct Epoch(pub(crate) u64);
|
||||
|
||||
614
components/ordhook-core/src/ord/inscription.rs
Normal file
@@ -0,0 +1,614 @@
|
||||
use chainhook_sdk::bitcoin::{hashes::Hash, Txid};
|
||||
|
||||
use super::{inscription_id::InscriptionId, media::Media};
|
||||
|
||||
use {
|
||||
super::*,
|
||||
chainhook_sdk::bitcoin::{
|
||||
blockdata::{
|
||||
opcodes,
|
||||
script::{self, PushBytesBuf},
|
||||
},
|
||||
ScriptBuf,
|
||||
},
|
||||
std::str,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq, Default)]
|
||||
pub struct Inscription {
|
||||
pub body: Option<Vec<u8>>,
|
||||
pub content_encoding: Option<Vec<u8>>,
|
||||
pub content_type: Option<Vec<u8>>,
|
||||
pub duplicate_field: bool,
|
||||
pub incomplete_field: bool,
|
||||
pub metadata: Option<Vec<u8>>,
|
||||
pub metaprotocol: Option<Vec<u8>>,
|
||||
pub parent: Option<Vec<u8>>,
|
||||
pub pointer: Option<Vec<u8>>,
|
||||
pub unrecognized_even_field: bool,
|
||||
}
|
||||
|
||||
impl Inscription {
|
||||
#[cfg(test)]
|
||||
pub(crate) fn new(content_type: Option<Vec<u8>>, body: Option<Vec<u8>>) -> Self {
|
||||
Self {
|
||||
content_type,
|
||||
body,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn pointer_value(pointer: u64) -> Vec<u8> {
|
||||
let mut bytes = pointer.to_le_bytes().to_vec();
|
||||
|
||||
while bytes.last().copied() == Some(0) {
|
||||
bytes.pop();
|
||||
}
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
    pub(crate) fn append_reveal_script_to_builder(
        &self,
        mut builder: script::Builder,
    ) -> script::Builder {
        builder = builder
            .push_opcode(opcodes::OP_FALSE)
            .push_opcode(opcodes::all::OP_IF)
            .push_slice(envelope::PROTOCOL_ID);

        if let Some(content_type) = self.content_type.clone() {
            builder = builder
                .push_slice(envelope::CONTENT_TYPE_TAG)
                .push_slice(PushBytesBuf::try_from(content_type).unwrap());
        }

        if let Some(content_encoding) = self.content_encoding.clone() {
            builder = builder
                .push_slice(envelope::CONTENT_ENCODING_TAG)
                .push_slice(PushBytesBuf::try_from(content_encoding).unwrap());
        }

        if let Some(protocol) = self.metaprotocol.clone() {
            builder = builder
                .push_slice(envelope::METAPROTOCOL_TAG)
                .push_slice(PushBytesBuf::try_from(protocol).unwrap());
        }

        if let Some(parent) = self.parent.clone() {
            builder = builder
                .push_slice(envelope::PARENT_TAG)
                .push_slice(PushBytesBuf::try_from(parent).unwrap());
        }

        if let Some(pointer) = self.pointer.clone() {
            builder = builder
                .push_slice(envelope::POINTER_TAG)
                .push_slice(PushBytesBuf::try_from(pointer).unwrap());
        }

        if let Some(metadata) = &self.metadata {
            for chunk in metadata.chunks(520) {
                builder = builder.push_slice(envelope::METADATA_TAG);
                builder = builder.push_slice(PushBytesBuf::try_from(chunk.to_vec()).unwrap());
            }
        }

        if let Some(body) = &self.body {
            builder = builder.push_slice(envelope::BODY_TAG);
            for chunk in body.chunks(520) {
                builder = builder.push_slice(PushBytesBuf::try_from(chunk.to_vec()).unwrap());
            }
        }

        builder.push_opcode(opcodes::all::OP_ENDIF)
    }

    #[cfg(test)]
    pub(crate) fn append_reveal_script(&self, builder: script::Builder) -> ScriptBuf {
        self.append_reveal_script_to_builder(builder).into_script()
    }

    pub(crate) fn append_batch_reveal_script_to_builder(
        inscriptions: &[Inscription],
        mut builder: script::Builder,
    ) -> script::Builder {
        for inscription in inscriptions {
            builder = inscription.append_reveal_script_to_builder(builder);
        }

        builder
    }

    pub(crate) fn append_batch_reveal_script(
        inscriptions: &[Inscription],
        builder: script::Builder,
    ) -> ScriptBuf {
        Inscription::append_batch_reveal_script_to_builder(inscriptions, builder).into_script()
    }

    pub(crate) fn media(&self) -> Media {
        if self.body.is_none() {
            return Media::Unknown;
        }

        let Some(content_type) = self.content_type() else {
            return Media::Unknown;
        };

        content_type.parse().unwrap_or(Media::Unknown)
    }

    pub(crate) fn body(&self) -> Option<&[u8]> {
        Some(self.body.as_ref()?)
    }

    pub(crate) fn into_body(self) -> Option<Vec<u8>> {
        self.body
    }

    pub(crate) fn content_length(&self) -> Option<usize> {
        Some(self.body()?.len())
    }

    pub(crate) fn content_type(&self) -> Option<&str> {
        str::from_utf8(self.content_type.as_ref()?).ok()
    }

    pub(crate) fn metaprotocol(&self) -> Option<&str> {
        str::from_utf8(self.metaprotocol.as_ref()?).ok()
    }

    pub(crate) fn parent(&self) -> Option<InscriptionId> {
        use chainhook_sdk::bitcoin::hash_types::Txid as TXID_LEN;
        let value = self.parent.as_ref()?;

        if value.len() < TXID_LEN::LEN {
            return None;
        }

        if value.len() > TXID_LEN::LEN + 4 {
            return None;
        }

        let (txid, index) = value.split_at(TXID_LEN::LEN);

        if let Some(last) = index.last() {
            // Accept fixed length encoding with 4 bytes (with potential trailing zeroes)
            // or variable length (no trailing zeroes)
            if index.len() != 4 && *last == 0 {
                return None;
            }
        }

        let txid = Txid::from_slice(txid).unwrap();

        let index = [
            index.first().copied().unwrap_or(0),
            index.get(1).copied().unwrap_or(0),
            index.get(2).copied().unwrap_or(0),
            index.get(3).copied().unwrap_or(0),
        ];

        let index = u32::from_le_bytes(index);

        Some(InscriptionId { txid, index })
    }

    pub(crate) fn pointer(&self) -> Option<u64> {
        let value = self.pointer.as_ref()?;

        if value.iter().skip(8).copied().any(|byte| byte != 0) {
            return None;
        }

        let pointer = [
            value.first().copied().unwrap_or(0),
            value.get(1).copied().unwrap_or(0),
            value.get(2).copied().unwrap_or(0),
            value.get(3).copied().unwrap_or(0),
            value.get(4).copied().unwrap_or(0),
            value.get(5).copied().unwrap_or(0),
            value.get(6).copied().unwrap_or(0),
            value.get(7).copied().unwrap_or(0),
        ];

        Some(u64::from_le_bytes(pointer))
    }

    #[cfg(test)]
    pub(crate) fn to_witness(&self) -> chainhook_sdk::bitcoin::Witness {
        let builder = script::Builder::new();

        let script = self.append_reveal_script(builder);

        let mut witness = chainhook_sdk::bitcoin::Witness::new();

        witness.push(script);
        witness.push([]);

        witness
    }
}

#[cfg(test)]
mod tests {
    use chainhook_sdk::bitcoin::Witness;

    use super::*;

    fn inscription(content_type: &str, body: impl AsRef<[u8]>) -> Inscription {
        Inscription::new(Some(content_type.into()), Some(body.as_ref().into()))
    }

    fn envelope(payload: &[&[u8]]) -> Witness {
        let mut builder = script::Builder::new()
            .push_opcode(opcodes::OP_FALSE)
            .push_opcode(opcodes::all::OP_IF);

        for data in payload {
            let mut buf = PushBytesBuf::new();
            buf.extend_from_slice(data).unwrap();
            builder = builder.push_slice(buf);
        }

        let script = builder.push_opcode(opcodes::all::OP_ENDIF).into_script();

        Witness::from_slice(&[script.into_bytes(), Vec::new()])
    }

    #[test]
    fn reveal_script_chunks_body() {
        assert_eq!(
            inscription("foo", [])
                .append_reveal_script(script::Builder::new())
                .instructions()
                .count(),
            7
        );

        assert_eq!(
            inscription("foo", [0; 1])
                .append_reveal_script(script::Builder::new())
                .instructions()
                .count(),
            8
        );

        assert_eq!(
            inscription("foo", [0; 520])
                .append_reveal_script(script::Builder::new())
                .instructions()
                .count(),
            8
        );

        assert_eq!(
            inscription("foo", [0; 521])
                .append_reveal_script(script::Builder::new())
                .instructions()
                .count(),
            9
        );

        assert_eq!(
            inscription("foo", [0; 1040])
                .append_reveal_script(script::Builder::new())
                .instructions()
                .count(),
            9
        );

        assert_eq!(
            inscription("foo", [0; 1041])
                .append_reveal_script(script::Builder::new())
                .instructions()
                .count(),
            10
        );
    }

    #[test]
    fn reveal_script_chunks_metadata() {
        assert_eq!(
            Inscription {
                metadata: None,
                ..Default::default()
            }
            .append_reveal_script(script::Builder::new())
            .instructions()
            .count(),
            4
        );

        assert_eq!(
            Inscription {
                metadata: Some(Vec::new()),
                ..Default::default()
            }
            .append_reveal_script(script::Builder::new())
            .instructions()
            .count(),
            4
        );

        assert_eq!(
            Inscription {
                metadata: Some(vec![0; 1]),
                ..Default::default()
            }
            .append_reveal_script(script::Builder::new())
            .instructions()
            .count(),
            6
        );

        assert_eq!(
            Inscription {
                metadata: Some(vec![0; 520]),
                ..Default::default()
            }
            .append_reveal_script(script::Builder::new())
            .instructions()
            .count(),
            6
        );

        assert_eq!(
            Inscription {
                metadata: Some(vec![0; 521]),
                ..Default::default()
            }
            .append_reveal_script(script::Builder::new())
            .instructions()
            .count(),
            8
        );
    }

    #[test]
    fn inscription_with_no_parent_field_has_no_parent() {
        assert!(Inscription {
            parent: None,
            ..Default::default()
        }
        .parent()
        .is_none());
    }

    #[test]
    fn inscription_with_parent_field_shorter_than_txid_length_has_no_parent() {
        assert!(Inscription {
            parent: Some(vec![]),
            ..Default::default()
        }
        .parent()
        .is_none());
    }

    #[test]
    fn inscription_with_parent_field_longer_than_txid_and_index_has_no_parent() {
        assert!(Inscription {
            parent: Some(vec![1; 37]),
            ..Default::default()
        }
        .parent()
        .is_none());
    }

    #[test]
    fn inscription_with_parent_field_index_with_trailing_zeroes_and_fixed_length_has_parent() {
        let mut parent = vec![1; 36];

        parent[35] = 0;

        assert!(Inscription {
            parent: Some(parent),
            ..Default::default()
        }
        .parent()
        .is_some());
    }

    #[test]
    fn inscription_with_parent_field_index_with_trailing_zeroes_and_variable_length_has_no_parent()
    {
        let mut parent = vec![1; 35];

        parent[34] = 0;

        assert!(Inscription {
            parent: Some(parent),
            ..Default::default()
        }
        .parent()
        .is_none());
    }

    #[test]
    fn inscription_parent_txid_is_deserialized_correctly() {
        assert_eq!(
            Inscription {
                parent: Some(vec![
                    0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c,
                    0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19,
                    0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
                ]),
                ..Default::default()
            }
            .parent()
            .unwrap()
            .txid,
            "1f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100"
                .parse()
                .unwrap()
        );
    }

    #[test]
    fn inscription_parent_with_zero_byte_index_field_is_deserialized_correctly() {
        assert_eq!(
            Inscription {
                parent: Some(vec![1; 32]),
                ..Default::default()
            }
            .parent()
            .unwrap()
            .index,
            0
        );
    }

    #[test]
    fn inscription_parent_with_one_byte_index_field_is_deserialized_correctly() {
        assert_eq!(
            Inscription {
                parent: Some(vec![
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01
                ]),
                ..Default::default()
            }
            .parent()
            .unwrap()
            .index,
            1
        );
    }

    #[test]
    fn inscription_parent_with_two_byte_index_field_is_deserialized_correctly() {
        assert_eq!(
            Inscription {
                parent: Some(vec![
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x02
                ]),
                ..Default::default()
            }
            .parent()
            .unwrap()
            .index,
            0x0201,
        );
    }

    #[test]
    fn inscription_parent_with_three_byte_index_field_is_deserialized_correctly() {
        assert_eq!(
            Inscription {
                parent: Some(vec![
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x02, 0x03
                ]),
                ..Default::default()
            }
            .parent()
            .unwrap()
            .index,
            0x030201,
        );
    }

    #[test]
    fn inscription_parent_with_four_byte_index_field_is_deserialized_correctly() {
        assert_eq!(
            Inscription {
                parent: Some(vec![
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                    0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x02, 0x03, 0x04,
                ]),
                ..Default::default()
            }
            .parent()
            .unwrap()
            .index,
            0x04030201,
        );
    }

    #[test]
    fn pointer_decode() {
        assert_eq!(
            Inscription {
                pointer: None,
                ..Default::default()
            }
            .pointer(),
            None
        );
        assert_eq!(
            Inscription {
                pointer: Some(vec![0]),
                ..Default::default()
            }
            .pointer(),
            Some(0),
        );
        assert_eq!(
            Inscription {
                pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8]),
                ..Default::default()
            }
            .pointer(),
            Some(0x0807060504030201),
        );
        assert_eq!(
            Inscription {
                pointer: Some(vec![1, 2, 3, 4, 5, 6]),
                ..Default::default()
            }
            .pointer(),
            Some(0x0000060504030201),
        );
        assert_eq!(
            Inscription {
                pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 0]),
                ..Default::default()
            }
            .pointer(),
            Some(0x0807060504030201),
        );
        assert_eq!(
            Inscription {
                pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 1]),
                ..Default::default()
            }
            .pointer(),
            None,
        );
        assert_eq!(
            Inscription {
                pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 1]),
                ..Default::default()
            }
            .pointer(),
            None,
        );
    }

    #[test]
    fn pointer_encode() {
        assert_eq!(
            Inscription {
                pointer: None,
                ..Default::default()
            }
            .to_witness(),
            envelope(&[b"ord"]),
        );

        assert_eq!(
            Inscription {
                pointer: Some(vec![1, 2, 3]),
                ..Default::default()
            }
            .to_witness(),
            envelope(&[b"ord", &[2], &[1, 2, 3]]),
        );
    }
}

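Two details of the new `Inscription` type are worth calling out: the pointer field is a little-endian integer with trailing zero bytes stripped on encode and tolerated on decode, and the reveal script always wraps its payload in an `OP_FALSE OP_IF ... OP_ENDIF` envelope opened by the `ord` protocol ID. A hedged sketch restating both, using only behaviour visible in the file above (the test-only helpers `to_witness` and `envelope` are reused here for illustration):

    // Little-endian pointer round trip: 513 = 0x0201 encodes to [0x01, 0x02].
    assert_eq!(Inscription::pointer_value(513), vec![0x01, 0x02]);
    let with_pointer = Inscription {
        pointer: Some(Inscription::pointer_value(513)),
        ..Default::default()
    };
    assert_eq!(with_pointer.pointer(), Some(513));

    // The same inscription's reveal witness is the bare envelope plus the pointer
    // tag/value pair, exactly as the `pointer_encode` test spells out.
    assert_eq!(
        with_pointer.to_witness(),
        envelope(&[b"ord", &[2], &[0x01, 0x02]]),
    );
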
@@ -3,7 +3,7 @@ use std::{
    str::FromStr,
};

use chainhook_sdk::bitcoincore_rpc::bitcoin::Txid;
use chainhook_sdk::bitcoin::Txid;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

use super::deserialize_from_str::DeserializeFromStr;
@@ -43,7 +43,7 @@ pub enum ParseError {
    Character(char),
    Length(usize),
    Separator(char),
    Txid(chainhook_sdk::bitcoincore_rpc::bitcoin::hashes::hex::Error),
    Txid(chainhook_sdk::bitcoin::hashes::hex::HexToArrayError),
    Index(std::num::ParseIntError),
}

102
components/ordhook-core/src/ord/media.rs
Normal file
@@ -0,0 +1,102 @@
use std::{
    fmt::{Display, Formatter},
    str::FromStr,
};

use anyhow::{anyhow, Error};

#[derive(Debug, PartialEq, Copy, Clone)]
pub(crate) enum Media {
    Audio,
    Code(Language),
    Font,
    Iframe,
    Image,
    Markdown,
    Model,
    Pdf,
    Text,
    Unknown,
    Video,
}

#[derive(Debug, PartialEq, Copy, Clone)]
pub(crate) enum Language {
    Css,
    JavaScript,
    Json,
    Python,
    Yaml,
}

impl Display for Language {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Self::Css => "css",
                Self::JavaScript => "javascript",
                Self::Json => "json",
                Self::Python => "python",
                Self::Yaml => "yaml",
            }
        )
    }
}

impl Media {
    #[rustfmt::skip]
    const TABLE: &'static [(&'static str, Media, &'static [&'static str])] = &[
        ("application/cbor", Media::Unknown, &["cbor"]),
        ("application/json", Media::Code(Language::Json), &["json"]),
        ("application/octet-stream", Media::Unknown, &["bin"]),
        ("application/pdf", Media::Pdf, &["pdf"]),
        ("application/pgp-signature", Media::Text, &["asc"]),
        ("application/protobuf", Media::Unknown, &["binpb"]),
        ("application/x-javascript", Media::Code(Language::JavaScript), &[]),
        ("application/yaml", Media::Code(Language::Yaml), &["yaml", "yml"]),
        ("audio/flac", Media::Audio, &["flac"]),
        ("audio/mpeg", Media::Audio, &["mp3"]),
        ("audio/wav", Media::Audio, &["wav"]),
        ("font/otf", Media::Font, &["otf"]),
        ("font/ttf", Media::Font, &["ttf"]),
        ("font/woff", Media::Font, &["woff"]),
        ("font/woff2", Media::Font, &["woff2"]),
        ("image/apng", Media::Image, &["apng"]),
        ("image/avif", Media::Image, &[]),
        ("image/gif", Media::Image, &["gif"]),
        ("image/jpeg", Media::Image, &["jpg", "jpeg"]),
        ("image/png", Media::Image, &["png"]),
        ("image/svg+xml", Media::Iframe, &["svg"]),
        ("image/webp", Media::Image, &["webp"]),
        ("model/gltf+json", Media::Model, &["gltf"]),
        ("model/gltf-binary", Media::Model, &["glb"]),
        ("model/stl", Media::Unknown, &["stl"]),
        ("text/css", Media::Code(Language::Css), &["css"]),
        ("text/html", Media::Iframe, &[]),
        ("text/html;charset=utf-8", Media::Iframe, &["html"]),
        ("text/javascript", Media::Code(Language::JavaScript), &["js"]),
        ("text/markdown", Media::Markdown, &[]),
        ("text/markdown;charset=utf-8", Media::Markdown, &["md"]),
        ("text/plain", Media::Text, &[]),
        ("text/plain;charset=utf-8", Media::Text, &["txt"]),
        ("text/x-python", Media::Code(Language::Python), &["py"]),
        ("video/mp4", Media::Video, &["mp4"]),
        ("video/webm", Media::Video, &["webm"]),
    ];
}

impl FromStr for Media {
    type Err = Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        for entry in Self::TABLE {
            if entry.0 == s {
                return Ok(entry.1);
            }
        }

        Err(anyhow!("unknown content type: {s}"))
    }
}

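The lookup is an exact string match over the table, so only the precise content-type strings listed above resolve; `Inscription::media()` maps anything else to `Media::Unknown`. A small crate-internal usage sketch, with values taken from the table itself:

    use std::str::FromStr;

    assert_eq!(Media::from_str("image/png").unwrap(), Media::Image);
    assert_eq!(Media::from_str("text/plain;charset=utf-8").unwrap(), Media::Text);
    assert!(Media::from_str("image/PNG").is_err()); // case-sensitive, no normalization
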
@@ -7,9 +7,12 @@ use chainhook_sdk::types::BitcoinNetwork;

pub mod chain;
pub mod deserialize_from_str;
pub mod envelope;
pub mod epoch;
pub mod height;
pub mod inscription;
pub mod inscription_id;
pub mod media;
pub mod sat;
pub mod sat_point;

@@ -18,3 +21,4 @@ const DIFFCHANGE_INTERVAL: u64 =
const SUBSIDY_HALVING_INTERVAL: u64 =
    chainhook_sdk::bitcoincore_rpc::bitcoin::blockdata::constants::SUBSIDY_HALVING_INTERVAL as u64;
const CYCLE_EPOCHS: u64 = 6;
pub const COIN_VALUE: u64 = 100_000_000;

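The halving interval is still re-exported from `bitcoincore_rpc`, but sats-per-bitcoin now lives in this module as a plain constant. An illustrative check, not part of the diff:

    // 50 BTC genesis subsidy, expressed with the constant introduced above.
    assert_eq!(50 * COIN_VALUE, 5_000_000_000);
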
@@ -90,7 +90,7 @@ impl AddAssign<u64> for Sat {

#[cfg(test)]
mod tests {
    use chainhook_sdk::bitcoincore_rpc::bitcoin::blockdata::constants::COIN_VALUE;
    use super::COIN_VALUE;

    use super::*;

@@ -139,7 +139,7 @@ pub async fn scan_bitcoin_chainstate_via_rpc_using_predicate(

    let inscriptions_revealed = get_inscriptions_revealed_in_block(&block)
        .iter()
        .map(|d| d.inscription_number.to_string())
        .map(|d| d.get_inscription_number().to_string())
        .collect::<Vec<String>>();

    let inscriptions_transferred = get_inscriptions_transferred_in_block(&block).len();
@@ -234,7 +234,7 @@ pub async fn execute_predicates_action<'a>(
        Ok(action) => {
            actions_triggered += 1;
            match action {
                BitcoinChainhookOccurrence::Http(request) => {
                BitcoinChainhookOccurrence::Http(request, _data) => {
                    send_request(request, 60, 3, &ctx).await?
                }
                BitcoinChainhookOccurrence::File(path, bytes) => {

@@ -17,8 +17,11 @@ use crate::core::{new_traversals_lazy_cache, should_sync_ordhook_db, should_sync
use crate::db::{
    delete_data_in_ordhook_db, insert_entry_in_blocks, open_ordhook_db_conn_rocks_db_loop,
    open_readwrite_ordhook_db_conn, open_readwrite_ordhook_dbs, update_inscriptions_with_block,
    update_locations_with_block, update_sequence_metadata_with_block, LazyBlock,
    LazyBlockTransaction,
    update_locations_with_block, BlockBytesCursor, TransactionBytesCursor,
};
use crate::db::{
    find_last_block_inserted, find_missing_blocks, run_compaction,
    update_sequence_metadata_with_block,
};
use crate::scan::bitcoin::process_block_with_predicates;
use crate::service::http_api::start_predicate_api_server;
@@ -28,7 +31,6 @@ use crate::service::observers::{
    update_observer_streaming_enabled, ObserverReport,
};
use crate::service::runloops::start_bitcoin_scan_runloop;

use chainhook_sdk::chainhooks::bitcoin::BitcoinChainhookOccurrencePayload;
use chainhook_sdk::chainhooks::types::{
    BitcoinChainhookSpecification, ChainhookFullSpecification, ChainhookSpecification,
@@ -65,11 +67,14 @@ impl Service {
        predicate_activity_relayer: Option<
            crossbeam_channel::Sender<BitcoinChainhookOccurrencePayload>,
        >,
        check_blocks_integrity: bool,
    ) -> Result<(), String> {
        let mut event_observer_config = self.config.get_event_observer_config();

        // Catch-up with chain tip
        let chain_tip_height = self.catch_up_with_chain_tip(false).await?;
        let chain_tip_height = self
            .catch_up_with_chain_tip(false, check_blocks_integrity)
            .await?;
        info!(
            self.ctx.expect_logger(),
            "Database up to date, service will start streaming blocks"

@@ -442,23 +447,69 @@ impl Service {
    pub async fn catch_up_with_chain_tip(
        &mut self,
        rebuild_from_scratch: bool,
        compact_and_check_rocksdb_integrity: bool,
    ) -> Result<u64, String> {
        if rebuild_from_scratch {
            let blocks_db = open_ordhook_db_conn_rocks_db_loop(
                true,
                &self.config.expected_cache_path(),
                &self.ctx,
            );
            let inscriptions_db_conn_rw =
                open_readwrite_ordhook_db_conn(&self.config.expected_cache_path(), &self.ctx)?;
        {
            if compact_and_check_rocksdb_integrity {
                let (tip, missing_blocks) = {
                    let blocks_db = open_ordhook_db_conn_rocks_db_loop(
                        false,
                        &self.config.expected_cache_path(),
                        &self.ctx,
                    );
                    let tip = find_last_block_inserted(&blocks_db);
                    info!(
                        self.ctx.expect_logger(),
                        "Checking database integrity up to block #{tip}",
                    );
                    let missing_blocks = find_missing_blocks(&blocks_db, 0, tip, &self.ctx);
                    (tip, missing_blocks)
                };
                if !missing_blocks.is_empty() {
                    info!(
                        self.ctx.expect_logger(),
                        "{} missing blocks detected, will attempt to repair data",
                        missing_blocks.len()
                    );
                    let block_ingestion_processor =
                        start_block_archiving_processor(&self.config, &self.ctx, false, None);
                    download_and_pipeline_blocks(
                        &self.config,
                        missing_blocks.into_iter().map(|x| x as u64).collect(),
                        tip.into(),
                        Some(&block_ingestion_processor),
                        10_000,
                        &self.ctx,
                    )
                    .await?;
                }
                let blocks_db_rw = open_ordhook_db_conn_rocks_db_loop(
                    false,
                    &self.config.expected_cache_path(),
                    &self.ctx,
                );
                info!(self.ctx.expect_logger(), "Running database compaction",);
                run_compaction(&blocks_db_rw, tip);
            }

            delete_data_in_ordhook_db(
                767430,
                820000,
                &blocks_db,
                &inscriptions_db_conn_rw,
                &self.ctx,
            )?;
            if rebuild_from_scratch {
                let blocks_db_rw = open_ordhook_db_conn_rocks_db_loop(
                    false,
                    &self.config.expected_cache_path(),
                    &self.ctx,
                );

                let inscriptions_db_conn_rw =
                    open_readwrite_ordhook_db_conn(&self.config.expected_cache_path(), &self.ctx)?;

                delete_data_in_ordhook_db(
                    767430,
                    820000,
                    &blocks_db_rw,
                    &inscriptions_db_conn_rw,
                    &self.ctx,
                )?;
            }
        }
        self.update_state(None).await
    }
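With the signature change above, callers now opt into the rocksdb integrity check explicitly; the SDK runloop further down passes `true`. A hedged sketch of the call shape, reusing the names from this diff:

    // Catch up without wiping data, verifying and compacting the block store first.
    let chain_tip_height = service.catch_up_with_chain_tip(false, true).await?;
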
@@ -598,8 +649,8 @@ fn chainhook_sidecar_mutate_ordhook_db(command: HandleBlock, config: &Config, ct
            }
        }
        HandleBlock::ApplyBlock(block) => {
            let compressed_block: LazyBlock = match LazyBlock::from_standardized_block(&block) {
                Ok(block) => block,
            let block_bytes = match BlockBytesCursor::from_standardized_block(&block) {
                Ok(block_bytes) => block_bytes,
                Err(e) => {
                    ctx.try_log(|logger| {
                        error!(
@@ -614,7 +665,7 @@ fn chainhook_sidecar_mutate_ordhook_db(command: HandleBlock, config: &Config, ct
            };
            insert_entry_in_blocks(
                block.block_identifier.index as u32,
                &compressed_block,
                &block_bytes,
                true,
                &blocks_db_rw,
                &ctx,
@@ -668,7 +719,7 @@ pub fn start_observer_forwarding(
pub fn chainhook_sidecar_mutate_blocks(
    blocks_to_mutate: &mut Vec<BitcoinBlockDataCached>,
    blocks_ids_to_rollback: &Vec<BlockIdentifier>,
    cache_l2: &Arc<DashMap<(u32, [u8; 8]), LazyBlockTransaction, BuildHasherDefault<FxHasher>>>,
    cache_l2: &Arc<DashMap<(u32, [u8; 8]), TransactionBytesCursor, BuildHasherDefault<FxHasher>>>,
    config: &Config,
    ctx: &Context,
) {
@@ -705,8 +756,8 @@ pub fn chainhook_sidecar_mutate_blocks(
    let ordhook_config = config.get_ordhook_config();

    for cache in blocks_to_mutate.iter_mut() {
        let compressed_block: LazyBlock = match LazyBlock::from_standardized_block(&cache.block) {
            Ok(block) => block,
        let block_bytes = match BlockBytesCursor::from_standardized_block(&cache.block) {
            Ok(block_bytes) => block_bytes,
            Err(e) => {
                ctx.try_log(|logger| {
                    error!(
@@ -722,7 +773,7 @@ pub fn chainhook_sidecar_mutate_blocks(

        insert_entry_in_blocks(
            cache.block.block_identifier.index as u32,
            &compressed_block,
            &block_bytes,
            true,
            &blocks_db_rw,
            &ctx,
@@ -754,7 +805,7 @@ pub fn chainhook_sidecar_mutate_blocks(

        let inscriptions_revealed = get_inscriptions_revealed_in_block(&cache.block)
            .iter()
            .map(|d| d.inscription_number.to_string())
            .map(|d| d.get_inscription_number().to_string())
            .collect::<Vec<String>>();

        let inscriptions_transferred =

@@ -19,12 +19,12 @@ serde = "1"
[build-dependencies]
napi-build = "2.0.1"

[build]
target = "armv7-unknown-linux-gnueabihf"
rustflags = ["-C", "link-args=-L/lib/arm-linux-gnueabihf"]
# [build]
# target = "armv7-unknown-linux-gnueabihf"
# rustflags = ["-C", "link-args=-L/lib/arm-linux-gnueabihf"]

[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-g++"
# [target.armv7-unknown-linux-gnueabihf]
# linker = "arm-linux-gnueabihf-g++"

[profile.release]
lto = true
# [profile.release]
# lto = true

@@ -159,7 +159,7 @@ impl OrdinalsIndexingRunloop {
        match cmd {
            IndexerCommand::StreamBlocks => {
                // We start the service as soon as the start() method is being called.
                let future = service.catch_up_with_chain_tip(false);
                let future = service.catch_up_with_chain_tip(false, true);
                let _ = hiro_system_kit::nestable_block_on(future).expect("unable to start indexer");
                let future = service.start_event_observer(observer_sidecar);
                let (command_tx, event_rx) =

@@ -22,6 +22,10 @@ RUN apt-get install nodejs -y

RUN npm install -g @napi-rs/cli yarn

COPY ./Cargo.toml /src/Cargo.toml

COPY ./Cargo.lock /src/Cargo.lock

COPY ./components/ordhook-core /src/components/ordhook-core

COPY ./components/ordhook-sdk-js /src/components/ordhook-sdk-js
@@ -40,7 +44,7 @@ WORKDIR /src/components/ordhook-cli

RUN cargo build --features release --release

RUN cp target/release/ordhook /out
RUN cp /src/target/release/ordhook /out

FROM debian:bullseye-slim
