fix: upgrade ord dependencies and integrate chainhook-sdk code (#397)

* chore: upgrade ord code

* chore: deps

* chore: add parents table

* chore: move pg deps to workspace

* fix: delegation works

* chore: import chainhook-sdk code

* chore: move ord to its own component

* ci: dockerfile

* docs: ord readme

* refactor: remove all unused code from chainhook-sdk (#412)

* chore: remove stacks types

* checkpoint before removing predicates

* chore: remove all predicates code

* bitcoin dep

* fix: tests

* fix: api tests
This commit is contained in:
Rafael Cárdenas
2025-02-11 12:21:30 -06:00
committed by GitHub
parent c7b9a34b4f
commit fcffa7e5c1
102 changed files with 11968 additions and 5153 deletions

View File

@@ -121,7 +121,11 @@ jobs:
strategy:
fail-fast: false
matrix:
suite: [ordhook-cli, ordhook-core]
suite:
- ordhook-cli
- ordhook-core
- chainhook-sdk
- chainhook-postgres
runs-on: ubuntu-latest
defaults:
run:

2492
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +1,17 @@
[workspace]
members = [
"components/chainhook-sdk",
"components/chainhook-postgres",
"components/chainhook-types-rs",
"components/ordhook-cli",
"components/ordhook-core",
"components/ord",
]
default-members = ["components/ordhook-cli"]
resolver = "2"
[workspace.dependencies]
bitcoin = "0.31.2"
tokio-postgres = "0.7.10"
deadpool-postgres = "0.14.0"
refinery = { version = "0.8", features = ["tokio-postgres"] }

View File

@@ -171,7 +171,12 @@ export class PgStore extends BasePgStore {
i.fee AS genesis_fee,
i.curse_type,
i.ordinal_number AS sat_ordinal,
i.parent,
(
SELECT ip.parent_inscription_id
FROM inscription_parents AS ip
WHERE ip.inscription_id = i.inscription_id
LIMIT 1
) AS parent,
i.metadata,
s.rarity AS sat_rarity,
s.coinbase_height AS sat_coinbase_height,

View File

@@ -55,7 +55,6 @@ describe('ETag cache', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 10000,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -156,7 +155,6 @@ describe('ETag cache', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '778575',
block_hash: '000000000000000000016bcbcc915c68bce367e18f09d0945dc6aacc0ee20121',
tx_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201',
@@ -189,7 +187,6 @@ describe('ETag cache', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '778576',
block_hash: '00000000000000000000a9db2c5d6c5445e7191927d6981ec580ed3c8112e342',
tx_id: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',
@@ -270,7 +267,6 @@ describe('ETag cache', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '778575',
block_hash: randomHash(),
tx_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201',
@@ -322,7 +318,6 @@ describe('ETag cache', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '778576',
block_hash: randomHash(),
tx_id: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',
@@ -366,7 +361,6 @@ describe('ETag cache', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '778575',
block_hash: randomHash(),
tx_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201',
@@ -418,7 +412,6 @@ describe('ETag cache', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '778576',
block_hash: randomHash(),
tx_id: '00000000000000000002a90330a99f67e3f01eb2ce070b45930581e82fb7a91d',

View File

@@ -11,6 +11,7 @@ import {
insertTestInscriptionRecursion,
inscriptionTransfer,
randomHash,
insertTestInscriptionParent,
} from '../helpers';
describe('/inscriptions', () => {
@@ -57,7 +58,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -144,7 +144,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -242,7 +241,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 0,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -275,7 +273,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0',
delegate: null,
timestamp: 0,
output: 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421:0',
@@ -287,6 +284,10 @@ describe('/inscriptions', () => {
rarity: 'common',
coinbase_height: '51483',
});
await insertTestInscriptionParent(db.sql, {
inscription_id: 'f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0',
parent_inscription_id: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201i0',
});
const response = await fastify.inject({
method: 'GET',
url: '/ordinals/v1/inscriptions/f351d86c6e6cae3c64e297e7463095732f216875bcc1f3c03f950a492bb25421i0',
@@ -319,7 +320,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: JSON.stringify({ foo: 'bar', test: 1337 }),
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -361,7 +361,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -443,7 +442,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -525,7 +523,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -675,7 +672,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -788,7 +784,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -938,7 +933,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -981,7 +975,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -1014,7 +1007,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dci0',
timestamp: 1676913207,
output: '42174ecc8a245841035793390bb53d63b3c2acb61366446f601b09e73b94b656:0',
@@ -1059,7 +1051,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -1212,7 +1203,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -1245,7 +1235,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '7ac73ecd01b9da4a7eab904655416dbfe8e03f193e091761b5a63ad0963570cd:0',
@@ -1574,7 +1563,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -1607,7 +1595,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -1714,7 +1701,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -1747,7 +1733,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -1874,7 +1859,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -1907,7 +1891,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -1970,7 +1953,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2003,7 +1985,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2069,7 +2050,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2102,7 +2082,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2164,7 +2143,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2197,7 +2175,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2261,7 +2238,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '25b372de3de0cb6fcc52c89a8bc3fb78eec596521ba20de16e53c1585be7c3fc:0',
@@ -2307,7 +2283,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2340,7 +2315,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2388,7 +2362,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2421,7 +2394,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2477,7 +2449,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2510,7 +2481,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2566,7 +2536,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2599,7 +2568,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2655,7 +2623,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2688,7 +2655,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2743,7 +2709,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2776,7 +2741,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2823,7 +2787,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2856,7 +2819,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -2913,7 +2875,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -2948,7 +2909,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3012,7 +2972,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -3045,7 +3004,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3105,7 +3063,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1677731361,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -3138,7 +3095,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1675312161,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3197,7 +3153,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -3230,7 +3185,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3263,7 +3217,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '567c7605439dfdc3a289d13fd2132237852f4a56e784b9364ba94499d5f9baf1:0',
@@ -3321,7 +3274,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -3354,7 +3306,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3387,7 +3338,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '567c7605439dfdc3a289d13fd2132237852f4a56e784b9364ba94499d5f9baf1:0',
@@ -3445,7 +3395,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -3478,7 +3427,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3511,7 +3459,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '567c7605439dfdc3a289d13fd2132237852f4a56e784b9364ba94499d5f9baf1:0',
@@ -3569,7 +3516,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '9f4a9b73b0713c5da01c0a47f97c6c001af9028d6bdd9e264dfacbc4e6790201:0',
@@ -3602,7 +3548,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -3635,7 +3580,6 @@ describe('/inscriptions', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207,
output: '567c7605439dfdc3a289d13fd2132237852f4a56e784b9364ba94499d5f9baf1:0',

View File

@@ -74,7 +74,6 @@ describe('/sats', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '775617',
block_hash: '163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88',
tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
@@ -118,7 +117,6 @@ describe('/sats', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '775617',
block_hash: '163de66dc9c0949905bfe8e148bde04600223cf88d19f26fdbeba1d6e6fa0f88',
tx_id: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc',
@@ -152,7 +150,6 @@ describe('/sats', () => {
delegate: null,
metaprotocol: null,
metadata: null,
parent: null,
block_height: '775618',
block_hash: '000000000000000000002a244dc7dfcf8ab85e42d182531c27197fc125086f19',
tx_id: 'b9cd9489fe30b81d007f753663d12766f1368721a87f4c69056c8215caa57993',

View File

@@ -67,7 +67,6 @@ describe('Status', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207000,
output: '38c46a8bf7ec90bc7f6b797e7dc84baa97f4e5fd4286b92fe1b50176d03b18dc:0',
@@ -100,7 +99,6 @@ describe('Status', () => {
pointer: null,
metadata: null,
metaprotocol: null,
parent: null,
delegate: null,
timestamp: 1676913207000,
output: 'a98d7055a77fa0b96cc31e30bb8bacf777382d1b67f1b7eca6f2014e961591c8:0',

View File

@@ -75,7 +75,6 @@ type TestOrdinalsInscriptionsRow = {
pointer: string | null;
metadata: string | null;
metaprotocol: string | null;
parent: string | null;
delegate: string | null;
timestamp: number;
};
@@ -183,6 +182,17 @@ export async function insertTestInscriptionRecursion(
await sql`INSERT INTO inscription_recursions ${sql(row)}`;
}
type TestOrdinalsInscriptionParentsRow = {
inscription_id: string;
parent_inscription_id: string;
};
export async function insertTestInscriptionParent(
sql: PgSqlClient,
row: TestOrdinalsInscriptionParentsRow
) {
await sql`INSERT INTO inscription_parents ${sql(row)}`;
}
export async function updateTestChainTip(sql: PgSqlClient, blockHeight: number) {
await sql`UPDATE chain_tip SET block_height = ${blockHeight}`;
}
@@ -218,7 +228,6 @@ export async function inscriptionReveal(sql: PgSqlClient, reveal: TestOrdinalsIn
pointer: reveal.pointer,
metadata: reveal.metadata,
metaprotocol: reveal.metaprotocol,
parent: reveal.parent,
delegate: reveal.delegate,
timestamp: reveal.timestamp,
});

View File

@@ -5,12 +5,11 @@ edition = "2021"
[dependencies]
bytes = "1.3"
chainhook-sdk = { version = "=0.12.10" }
deadpool-postgres = { workspace = true }
num-traits = "0.2.14"
slog = { version = "2.7.0" }
tokio-postgres = "0.7.10"
tokio = { version = "1.38.0", features = ["rt-multi-thread", "macros"] }
deadpool-postgres = "0.14.0"
tokio-postgres = { workspace = true }
[dev-dependencies]
test-case = "3.1.0"

View File

@@ -1,10 +1,7 @@
pub mod types;
pub mod utils;
pub use deadpool_postgres;
use deadpool_postgres::{Manager, ManagerConfig, Object, Pool, RecyclingMethod, Transaction};
pub use tokio_postgres;
use tokio_postgres::{Client, Config, NoTls, Row};
/// Standard chunk size to use when we're batching multiple query inserts into a single SQL statement to save on DB round trips.

View File

@@ -0,0 +1,39 @@
[package]
name = "chainhook-sdk"
version = "0.12.12"
description = "Stateless Transaction Indexing Engine for Stacks and Bitcoin"
license = "GPL-3.0"
edition = "2021"
[dependencies]
serde = { version = "1", features = ["rc"] }
serde_json = { version = "1", features = ["arbitrary_precision"] }
serde-hex = "0.1.0"
serde_derive = "1"
hiro-system-kit = { version = "0.3.4", optional = true }
rocket = { version = "=0.5.0", features = ["json"] }
bitcoin = { workspace = true }
bitcoincore-rpc = "0.18.0"
bitcoincore-rpc-json = "0.18.0"
reqwest = { version = "0.12", default-features = false, features = [
"blocking",
"json",
"rustls-tls",
] }
tokio = { version = "1.38.1", features = ["full"] }
base58 = "0.2.0"
crossbeam-channel = "0.5.6"
hex = "0.4.3"
zmq = "0.10.0"
lazy_static = "1.4.0"
chainhook-types = { path = "../chainhook-types-rs" }
[dev-dependencies]
assert-json-diff = "2.0.2"
test-case = "3.1.0"
[features]
default = ["hiro-system-kit/log"]
debug = ["hiro-system-kit/debug"]
release = ["hiro-system-kit/release_debug", "hiro-system-kit/full_log_level_prefix"]

View File

@@ -0,0 +1,470 @@
use std::time::Duration;
use crate::observer::BitcoinConfig;
use crate::utils::Context;
use bitcoincore_rpc::bitcoin::hashes::Hash;
use bitcoincore_rpc::bitcoin::{self, Amount, BlockHash};
use bitcoincore_rpc::jsonrpc::error::RpcError;
use bitcoincore_rpc_json::GetRawTransactionResultVoutScriptPubKey;
use chainhook_types::bitcoin::{OutPoint, TxIn, TxOut};
use chainhook_types::{
BitcoinBlockData, BitcoinBlockMetadata, BitcoinNetwork, BitcoinTransactionData,
BitcoinTransactionMetadata, BlockHeader, BlockIdentifier, TransactionIdentifier,
};
use hiro_system_kit::slog;
use reqwest::Client as HttpClient;
use serde::Deserialize;
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BitcoinBlockFullBreakdown {
pub hash: String,
pub height: usize,
pub tx: Vec<BitcoinTransactionFullBreakdown>,
pub time: usize,
pub nonce: u32,
pub previousblockhash: Option<String>,
pub confirmations: i32,
}
impl BitcoinBlockFullBreakdown {
pub fn get_block_header(&self) -> BlockHeader {
// Block id
let hash = format!("0x{}", self.hash);
let block_identifier = BlockIdentifier {
index: self.height as u64,
hash,
};
// Parent block id
let parent_block_hash = match self.previousblockhash {
Some(ref value) => format!("0x{}", value),
None => format!("0x{}", BlockHash::all_zeros()),
};
let parent_block_identifier = BlockIdentifier {
index: (self.height - 1) as u64,
hash: parent_block_hash,
};
BlockHeader {
block_identifier,
parent_block_identifier,
}
}
}
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BitcoinTransactionFullBreakdown {
pub txid: String,
pub vin: Vec<BitcoinTransactionInputFullBreakdown>,
pub vout: Vec<BitcoinTransactionOutputFullBreakdown>,
}
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BitcoinTransactionInputFullBreakdown {
pub sequence: u32,
/// The raw scriptSig in case of a coinbase tx.
// #[serde(default, with = "bitcoincore_rpc_json::serde_hex::opt")]
// pub coinbase: Option<Vec<u8>>,
/// Not provided for coinbase txs.
pub txid: Option<String>,
/// Not provided for coinbase txs.
pub vout: Option<u32>,
/// The scriptSig in case of a non-coinbase tx.
pub script_sig: Option<GetRawTransactionResultVinScriptSig>,
/// Not provided for coinbase txs.
pub txinwitness: Option<Vec<String>>,
pub prevout: Option<BitcoinTransactionInputPrevoutFullBreakdown>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GetRawTransactionResultVinScriptSig {
pub hex: String,
}
impl BitcoinTransactionInputFullBreakdown {
/// Whether this input is from a coinbase tx. If there is not a [BitcoinTransactionInputFullBreakdown::txid] field, the transaction is a coinbase transaction.
// Note: vout and script_sig fields are also not provided for coinbase transactions.
pub fn is_coinbase(&self) -> bool {
self.txid.is_none()
}
}
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BitcoinTransactionInputPrevoutFullBreakdown {
pub height: u64,
#[serde(with = "bitcoin::amount::serde::as_btc")]
pub value: Amount,
}
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BitcoinTransactionOutputFullBreakdown {
#[serde(with = "bitcoin::amount::serde::as_btc")]
pub value: Amount,
pub n: u32,
pub script_pub_key: GetRawTransactionResultVoutScriptPubKey,
}
#[derive(Deserialize, Serialize)]
pub struct NewBitcoinBlock {
pub burn_block_hash: String,
pub burn_block_height: u64,
pub reward_slot_holders: Vec<String>,
pub reward_recipients: Vec<RewardParticipant>,
pub burn_amount: u64,
}
#[allow(dead_code)]
#[derive(Deserialize, Serialize)]
pub struct RewardParticipant {
recipient: String,
amt: u64,
}
pub fn build_http_client() -> HttpClient {
HttpClient::builder()
.timeout(Duration::from_secs(15))
.http1_only()
.no_hickory_dns()
.connect_timeout(Duration::from_secs(15))
.tcp_keepalive(Some(Duration::from_secs(15)))
.no_proxy()
.danger_accept_invalid_certs(true)
.build()
.expect("Unable to build http client")
}
pub async fn download_and_parse_block_with_retry(
http_client: &HttpClient,
block_hash: &str,
bitcoin_config: &BitcoinConfig,
ctx: &Context,
) -> Result<BitcoinBlockFullBreakdown, String> {
let mut errors_count = 0;
let max_retries = 20;
let block = loop {
match download_and_parse_block(http_client, block_hash, bitcoin_config, ctx).await {
Ok(result) => break result,
Err(e) => {
errors_count += 1;
if errors_count > 3 && errors_count < max_retries {
ctx.try_log(|logger| {
slog::warn!(
logger,
"unable to fetch and parse block #{block_hash}: will retry in a few seconds (attempt #{errors_count}). Error: {e}",
)
});
} else if errors_count == max_retries {
return Err(format!("unable to fetch and parse block #{block_hash} after {errors_count} attempts. Error: {e}"));
}
std::thread::sleep(std::time::Duration::from_secs(1));
}
}
};
Ok(block)
}
pub async fn retrieve_block_hash_with_retry(
http_client: &HttpClient,
block_height: &u64,
bitcoin_config: &BitcoinConfig,
ctx: &Context,
) -> Result<String, String> {
let mut errors_count = 0;
let max_retries = 10;
let block_hash = loop {
match retrieve_block_hash(http_client, block_height, bitcoin_config, ctx).await {
Ok(result) => break result,
Err(e) => {
errors_count += 1;
if errors_count > 3 && errors_count < max_retries {
ctx.try_log(|logger| {
slog::warn!(
logger,
"unable to retrieve block hash #{block_height}: will retry in a few seconds (attempt #{errors_count}). Error: {e}",
)
});
} else if errors_count == max_retries {
return Err(format!("unable to retrieve block hash #{block_height} after {errors_count} attempts. Error: {e}"));
}
std::thread::sleep(std::time::Duration::from_secs(2));
}
}
};
Ok(block_hash)
}
pub async fn retrieve_block_hash(
http_client: &HttpClient,
block_height: &u64,
bitcoin_config: &BitcoinConfig,
_ctx: &Context,
) -> Result<String, String> {
let body = json!({
"jsonrpc": "1.0",
"id": "chainhook-cli",
"method": "getblockhash",
"params": [block_height]
});
let block_hash = http_client
.post(&bitcoin_config.rpc_url)
.basic_auth(&bitcoin_config.username, Some(&bitcoin_config.password))
.header("Content-Type", "application/json")
.header("Host", &bitcoin_config.rpc_url[7..])
.json(&body)
.send()
.await
.map_err(|e| format!("unable to send request ({})", e))?
.json::<bitcoincore_rpc::jsonrpc::Response>()
.await
.map_err(|e| format!("unable to parse response ({})", e))?
.result::<String>()
.map_err(|e| format!("unable to parse response ({})", e))?;
Ok(block_hash)
}
// not used internally by chainhook; exported for ordhook
pub async fn try_download_block_bytes_with_retry(
http_client: HttpClient,
block_height: u64,
bitcoin_config: BitcoinConfig,
ctx: Context,
) -> Result<Vec<u8>, String> {
let block_hash =
retrieve_block_hash_with_retry(&http_client, &block_height, &bitcoin_config, &ctx)
.await
.unwrap();
let mut errors_count = 0;
let response = loop {
match download_block(&http_client, &block_hash, &bitcoin_config, &ctx).await {
Ok(result) => break result,
Err(_e) => {
errors_count += 1;
if errors_count > 1 {
ctx.try_log(|logger| {
slog::warn!(
logger,
"unable to fetch block #{block_hash}: will retry in a few seconds (attempt #{errors_count}).",
)
});
}
std::thread::sleep(std::time::Duration::from_millis(1500));
continue;
}
}
};
Ok(response)
}
#[derive(Debug, Clone, Deserialize)]
pub struct RpcErrorResponse {
pub error: RpcError,
}
pub async fn download_block(
http_client: &HttpClient,
block_hash: &str,
bitcoin_config: &BitcoinConfig,
_ctx: &Context,
) -> Result<Vec<u8>, String> {
let body = json!({
"jsonrpc": "1.0",
"id": "chainhook-cli",
"method": "getblock",
"params": [block_hash, 3]
});
let res = http_client
.post(&bitcoin_config.rpc_url)
.basic_auth(&bitcoin_config.username, Some(&bitcoin_config.password))
.header("Content-Type", "application/json")
.header("Host", &bitcoin_config.rpc_url[7..])
.json(&body)
.send()
.await
.map_err(|e| format!("unable to send request ({})", e))?;
// Check status code
if !res.status().is_success() {
return Err(format!(
"http request unsuccessful ({:?})",
res.error_for_status()
));
}
let rpc_response_bytes = res
.bytes()
.await
.map_err(|e| format!("unable to get bytes ({})", e))?
.to_vec();
// Check rpc error presence
if let Ok(rpc_error) = serde_json::from_slice::<RpcErrorResponse>(&rpc_response_bytes[..]) {
return Err(format!(
"rpc request unsuccessful ({})",
rpc_error.error.message
));
}
Ok(rpc_response_bytes)
}
/// Decodes a raw `getblock` JSON-RPC payload into a full block breakdown.
pub fn parse_downloaded_block(
    downloaded_block: Vec<u8>,
) -> Result<BitcoinBlockFullBreakdown, String> {
    let envelope: bitcoincore_rpc::jsonrpc::Response =
        serde_json::from_slice(&downloaded_block[..])
            .map_err(|e| format!("unable to parse jsonrpc payload ({})", e))?;
    envelope
        .result::<BitcoinBlockFullBreakdown>()
        .map_err(|e| format!("unable to parse block ({})", e))
}
/// Fetches the block identified by `block_hash` over RPC and decodes it into
/// a full block breakdown in one step.
pub async fn download_and_parse_block(
    http_client: &HttpClient,
    block_hash: &str,
    bitcoin_config: &BitcoinConfig,
    _ctx: &Context,
) -> Result<BitcoinBlockFullBreakdown, String> {
    parse_downloaded_block(download_block(http_client, block_hash, bitcoin_config, _ctx).await?)
}
/// Converts a raw `getblock` (verbosity 3) breakdown into the canonical
/// `BitcoinBlockData` representation used by the rest of the pipeline.
///
/// Returns `Err((message, retryable))`: the boolean tells the caller whether
/// the failure (typically missing prevout data) is worth retrying.
pub fn standardize_bitcoin_block(
    block: BitcoinBlockFullBreakdown,
    network: &BitcoinNetwork,
    ctx: &Context,
) -> Result<BitcoinBlockData, (String, bool)> {
    let mut transactions = vec![];
    let block_height = block.height as u64;
    ctx.try_log(|logger| slog::debug!(logger, "Standardizing Bitcoin block {}", block.hash,));
    for (tx_index, mut tx) in block.tx.into_iter().enumerate() {
        let txid = tx.txid.to_string();
        let mut inputs = vec![];
        let mut sats_in = 0;
        for (index, input) in tx.vin.drain(..).enumerate() {
            // Coinbase inputs have no prevout; they are intentionally skipped.
            if input.is_coinbase() {
                continue;
            }
            // `ok_or_else` builds the error tuple only on the failure path,
            // instead of allocating a format!ed String for every input.
            let prevout = input.prevout.as_ref().ok_or_else(|| {
                (
                    format!(
                        "error retrieving prevout for transaction {}, input #{} (block #{})",
                        tx.txid, index, block.height
                    ),
                    true,
                )
            })?;
            let txid = input.txid.as_ref().ok_or_else(|| {
                (
                    format!(
                        "error retrieving txid for transaction {}, input #{} (block #{})",
                        tx.txid, index, block.height
                    ),
                    true,
                )
            })?;
            let vout = input.vout.ok_or_else(|| {
                (
                    format!(
                        "error retrieving vout for transaction {}, input #{} (block #{})",
                        tx.txid, index, block.height
                    ),
                    true,
                )
            })?;
            let script_sig = input.script_sig.ok_or_else(|| {
                (
                    format!(
                        "error retrieving script_sig for transaction {}, input #{} (block #{})",
                        tx.txid, index, block.height
                    ),
                    true,
                )
            })?;
            sats_in += prevout.value.to_sat();
            inputs.push(TxIn {
                previous_output: OutPoint {
                    txid: TransactionIdentifier::new(&txid.to_string()),
                    vout,
                    block_height: prevout.height,
                    value: prevout.value.to_sat(),
                },
                script_sig: format!("0x{}", script_sig.hex),
                sequence: input.sequence,
                // `unwrap_or_default()` avoids allocating an empty Vec literal,
                // and the redundant `.to_vec()` copy is gone.
                witness: input
                    .txinwitness
                    .unwrap_or_default()
                    .iter()
                    .map(|w| format!("0x{}", w))
                    .collect::<Vec<_>>(),
            });
        }
        let mut outputs = vec![];
        let mut sats_out = 0;
        for output in tx.vout.drain(..) {
            let value = output.value.to_sat();
            sats_out += value;
            outputs.push(TxOut {
                value,
                script_pubkey: format!("0x{}", hex::encode(&output.script_pub_key.hex)),
            });
        }
        let tx = BitcoinTransactionData {
            transaction_identifier: TransactionIdentifier {
                hash: format!("0x{}", txid),
            },
            operations: vec![],
            metadata: BitcoinTransactionMetadata {
                inputs,
                outputs,
                ordinal_operations: vec![],
                brc20_operation: None,
                proof: None,
                // Fee = inputs - outputs; saturating guards against malformed data.
                fee: sats_in.saturating_sub(sats_out),
                index: tx_index as u32,
            },
        };
        transactions.push(tx);
    }
    Ok(BitcoinBlockData {
        block_identifier: BlockIdentifier {
            hash: format!("0x{}", block.hash),
            index: block_height,
        },
        parent_block_identifier: BlockIdentifier {
            // The genesis block has no previousblockhash; use the zero hash.
            hash: format!(
                "0x{}",
                block
                    .previousblockhash
                    .unwrap_or(BlockHash::all_zeros().to_string())
            ),
            index: match block_height {
                0 => 0,
                _ => block_height - 1,
            },
        },
        timestamp: block.time as u32,
        metadata: BitcoinBlockMetadata {
            network: network.clone(),
        },
        transactions,
    })
}
#[cfg(test)]
pub mod tests;
// Test vectors
// 1) Devnet PoB
// 2022-10-26T03:06:17.376341Z INFO chainhook_event_observer::indexer: BitcoinBlockData { block_identifier: BlockIdentifier { index: 104, hash: "0x210d0d095a75d88fc059cb97f453eee33b1833153fb1f81b9c3c031c26bb106b" }, parent_block_identifier: BlockIdentifier { index: 103, hash: "0x5d5a4b8113c35f20fb0b69b1fb1ae1b88461ea57e2a2e4c036f97fae70ca1abb" }, timestamp: 1666753576, transactions: [BitcoinTransactionData { transaction_identifier: TransactionIdentifier { hash: "0xfaaac1833dc4883e7ec28f61e35b41f896c395f8d288b1a177155de2abd6052f" }, operations: [], metadata: BitcoinTransactionMetadata { inputs: [TxIn { previous_output: OutPoint { txid: "0000000000000000000000000000000000000000000000000000000000000000", vout: 4294967295 }, script_sig: "01680101", sequence: 4294967295, witness: [] }], outputs: [TxOut { value: 5000017550, script_pubkey: "76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac" }, TxOut { value: 0, script_pubkey: "6a24aa21a9ed4a190dfdc77e260409c2a693e6d3b8eca43afbc4bffb79ddcdcc9516df804d9b" }], stacks_operations: [] } }, BitcoinTransactionData { transaction_identifier: TransactionIdentifier { hash: "0x59193c24cb2325cd2271b89f790f958dcd4065088680ffbc201a0ebb2f3cbf25" }, operations: [], metadata: BitcoinTransactionMetadata { inputs: [TxIn { previous_output: OutPoint { txid: "9eebe848baaf8dd4810e4e4a91168e2e471c949439faf5d768750ca21d067689", vout: 3 }, script_sig: "483045022100a20f90e9e3c3bb7e558ad4fa65902d8cf6ce4bff1f5af0ac0a323b547385069c022021b9877abbc9d1eef175c7f712ac1b2d8f5ce566be542714effe42711e75b83801210239810ebf35e6f6c26062c99f3e183708d377720617c90a986859ec9c95d00be9", sequence: 4294967293, witness: [] }], outputs: [TxOut { value: 0, script_pubkey: "6a4c5069645b1681995f8e568287e0e4f5cbc1d6727dafb5e3a7822a77c69bd04208265aca9424d0337dac7d9e84371a2c91ece1891d67d3554bd9fdbe60afc6924d4b0773d90000006700010000006600012b" }, TxOut { value: 10000, script_pubkey: "76a914000000000000000000000000000000000000000088ac" }, TxOut { value: 10000, script_pubkey: 
"76a914000000000000000000000000000000000000000088ac" }, TxOut { value: 4999904850, script_pubkey: "76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac" }], stacks_operations: [PobBlockCommitment(PobBlockCommitmentData { signers: [], stacks_block_hash: "0x5b1681995f8e568287e0e4f5cbc1d6727dafb5e3a7822a77c69bd04208265aca", amount: 10000 })] } }], metadata: BitcoinBlockMetadata }
// 2022-10-26T03:06:21.929157Z INFO chainhook_event_observer::indexer: BitcoinBlockData { block_identifier: BlockIdentifier { index: 105, hash: "0x0302c4c6063eb7199d3a565351bceeea9df4cb4aa09293194dbab277e46c2979" }, parent_block_identifier: BlockIdentifier { index: 104, hash: "0x210d0d095a75d88fc059cb97f453eee33b1833153fb1f81b9c3c031c26bb106b" }, timestamp: 1666753581, transactions: [BitcoinTransactionData { transaction_identifier: TransactionIdentifier { hash: "0xe7de433aa89c1f946f89133b0463b6cfebb26ad73b0771a79fd66c6acbfe3fb9" }, operations: [], metadata: BitcoinTransactionMetadata { inputs: [TxIn { previous_output: OutPoint { txid: "0000000000000000000000000000000000000000000000000000000000000000", vout: 4294967295 }, script_sig: "01690101", sequence: 4294967295, witness: [] }], outputs: [TxOut { value: 5000017600, script_pubkey: "76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac" }, TxOut { value: 0, script_pubkey: "6a24aa21a9ed98ac3bc4e0c9ed53e3418a3bf3aa511dcd76088cf0e1c4fc71fb9755840d7a08" }], stacks_operations: [] } }, BitcoinTransactionData { transaction_identifier: TransactionIdentifier { hash: "0xe654501805d80d59ef0d95b57ad7a924f3be4a4dc0db5a785dfebe1f70c4e23e" }, operations: [], metadata: BitcoinTransactionMetadata { inputs: [TxIn { previous_output: OutPoint { txid: "59193c24cb2325cd2271b89f790f958dcd4065088680ffbc201a0ebb2f3cbf25", vout: 3 }, script_sig: "483045022100b59d2d07f68ea3a4f27a49979080a07b2432cfad9fc90e1edd0241496f0fd83f02205ac233f4cb68ada487f16339abedb7093948b683ba7d76b3b4058b2c0181a68901210239810ebf35e6f6c26062c99f3e183708d377720617c90a986859ec9c95d00be9", sequence: 4294967293, witness: [] }], outputs: [TxOut { value: 0, script_pubkey: "6a4c5069645b351bb015ef4f7dcdce4c9d95cbf157f85a3714626252cfc9078f3f1591ccdb13c3c7e22b34c4ffc2f6064a41df6fcd7f1b759d4f28b2f7cb6b27f283c868406e0000006800010000006600012c" }, TxOut { value: 10000, script_pubkey: "76a914000000000000000000000000000000000000000088ac" }, TxOut { value: 10000, script_pubkey: 
"76a914000000000000000000000000000000000000000088ac" }, TxOut { value: 4999867250, script_pubkey: "76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac" }], stacks_operations: [PobBlockCommitment(PobBlockCommitmentData { signers: [], stacks_block_hash: "0x5b351bb015ef4f7dcdce4c9d95cbf157f85a3714626252cfc9078f3f1591ccdb", amount: 10000 })] } }], metadata: BitcoinBlockMetadata }
// 2022-10-26T03:07:53.298531Z INFO chainhook_event_observer::indexer: BitcoinBlockData { block_identifier: BlockIdentifier { index: 106, hash: "0x52eb2aa15aa99afc4b918a552cef13e8b6eed84b257be097ad954b4f37a7e98d" }, parent_block_identifier: BlockIdentifier { index: 105, hash: "0x0302c4c6063eb7199d3a565351bceeea9df4cb4aa09293194dbab277e46c2979" }, timestamp: 1666753672, transactions: [BitcoinTransactionData { transaction_identifier: TransactionIdentifier { hash: "0xd28d7f5411416f94b95e9f999d5ee8ded5543ba9daae9f612b80f01c5107862d" }, operations: [], metadata: BitcoinTransactionMetadata { inputs: [TxIn { previous_output: OutPoint { txid: "0000000000000000000000000000000000000000000000000000000000000000", vout: 4294967295 }, script_sig: "016a0101", sequence: 4294967295, witness: [] }], outputs: [TxOut { value: 5000017500, script_pubkey: "76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac" }, TxOut { value: 0, script_pubkey: "6a24aa21a9ed71aaf7e5384879a1b112bf623ac8b46dd88b39c3d2c6f8a1d264fc4463e6356a" }], stacks_operations: [] } }, BitcoinTransactionData { transaction_identifier: TransactionIdentifier { hash: "0x72e8e43afc4362cf921ccc57fde3e07b4cb6fac5f306525c86d38234c18e21d1" }, operations: [], metadata: BitcoinTransactionMetadata { inputs: [TxIn { previous_output: OutPoint { txid: "e654501805d80d59ef0d95b57ad7a924f3be4a4dc0db5a785dfebe1f70c4e23e", vout: 3 }, script_sig: "4730440220798bb7d7fb14df35610db2ef04e5d5b6588440b7c429bf650a96f8570904052b02204a817e13e7296a24a8f6cc8737bddb55d1835e513ec2b9dcb03424e4536ae34c01210239810ebf35e6f6c26062c99f3e183708d377720617c90a986859ec9c95d00be9", sequence: 4294967293, witness: [] }], outputs: [TxOut { value: 0, script_pubkey: "6a4c5069645b504d310fc27c86a6b65d0b0e0297db1e185d3432fdab9fa96a1053407ed07b537b8b7d23c6309dfd24340e85b75cff11ad685f8b310c1d2098748a0fffb146ec00000069000100000066000128" }, TxOut { value: 20000, script_pubkey: "76a914000000000000000000000000000000000000000088ac" }, TxOut { value: 4999829750, script_pubkey: 
"76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac" }], stacks_operations: [PobBlockCommitment(PobBlockCommitmentData { signers: [], stacks_block_hash: "0x5b504d310fc27c86a6b65d0b0e0297db1e185d3432fdab9fa96a1053407ed07b", amount: 20000 })] } }], metadata: BitcoinBlockMetadata }

View File

@@ -0,0 +1,206 @@
use super::super::tests::{helpers, process_bitcoin_blocks_and_check_expectations};

/// Generates one `#[test]` per `test name => vector getter` pair. Every test
/// replays a block-shape vector through the fork-processing pipeline and
/// checks the emitted chain events against the vector's expectations. This
/// replaces 40 copy-pasted test functions with a single declaration list.
macro_rules! bitcoin_vector_tests {
    ($($test_name:ident => $vector:ident),* $(,)?) => {
        $(
            #[test]
            fn $test_name() {
                process_bitcoin_blocks_and_check_expectations(
                    helpers::bitcoin_shapes::$vector(),
                );
            }
        )*
    };
}

bitcoin_vector_tests!(
    test_bitcoin_vector_001 => get_vector_001,
    test_bitcoin_vector_002 => get_vector_002,
    test_bitcoin_vector_003 => get_vector_003,
    test_bitcoin_vector_004 => get_vector_004,
    test_bitcoin_vector_005 => get_vector_005,
    test_bitcoin_vector_006 => get_vector_006,
    test_bitcoin_vector_007 => get_vector_007,
    test_bitcoin_vector_008 => get_vector_008,
    test_bitcoin_vector_009 => get_vector_009,
    test_bitcoin_vector_010 => get_vector_010,
    test_bitcoin_vector_011 => get_vector_011,
    test_bitcoin_vector_012 => get_vector_012,
    test_bitcoin_vector_013 => get_vector_013,
    test_bitcoin_vector_014 => get_vector_014,
    test_bitcoin_vector_015 => get_vector_015,
    test_bitcoin_vector_016 => get_vector_016,
    test_bitcoin_vector_017 => get_vector_017,
    test_bitcoin_vector_018 => get_vector_018,
    test_bitcoin_vector_019 => get_vector_019,
    test_bitcoin_vector_020 => get_vector_020,
    test_bitcoin_vector_021 => get_vector_021,
    test_bitcoin_vector_022 => get_vector_022,
    test_bitcoin_vector_023 => get_vector_023,
    test_bitcoin_vector_024 => get_vector_024,
    test_bitcoin_vector_025 => get_vector_025,
    test_bitcoin_vector_026 => get_vector_026,
    test_bitcoin_vector_027 => get_vector_027,
    test_bitcoin_vector_028 => get_vector_028,
    test_bitcoin_vector_029 => get_vector_029,
    test_bitcoin_vector_030 => get_vector_030,
    test_bitcoin_vector_031 => get_vector_031,
    test_bitcoin_vector_032 => get_vector_032,
    test_bitcoin_vector_033 => get_vector_033,
    test_bitcoin_vector_034 => get_vector_034,
    test_bitcoin_vector_035 => get_vector_035,
    test_bitcoin_vector_036 => get_vector_036,
    test_bitcoin_vector_037 => get_vector_037,
    test_bitcoin_vector_038 => get_vector_038,
    test_bitcoin_vector_039 => get_vector_039,
    test_bitcoin_vector_040 => get_vector_040,
);

// #[test]
// fn test_bitcoin_vector_041() {
//     process_bitcoin_blocks_and_check_expectations(helpers::shapes::get_vector_041());
// }

View File

@@ -0,0 +1,418 @@
use crate::{
indexer::{ChainSegment, ChainSegmentIncompatibility},
utils::Context,
};
use chainhook_types::{
BlockHeader, BlockIdentifier, BlockchainEvent, BlockchainUpdatedWithHeaders,
BlockchainUpdatedWithReorg,
};
use hiro_system_kit::slog;
use std::collections::{BTreeMap, BTreeSet, HashSet};
/// In-memory pool of competing Bitcoin chain tips ("forks"). Headers are fed
/// in one at a time; the pad tracks every fork, selects the canonical one,
/// and emits blockchain events (appends or reorgs) as the tip moves.
pub struct ForkScratchPad {
    // Key into `forks` of the currently-selected canonical segment.
    canonical_fork_id: usize,
    // Headers whose parent has not been seen yet; retried after each append.
    orphans: BTreeSet<BlockIdentifier>,
    // All live chain segments, keyed by creation order.
    forks: BTreeMap<usize, ChainSegment>,
    // Full header data for every block currently tracked by any fork.
    headers_store: BTreeMap<BlockIdentifier, BlockHeader>,
}
/// A block is considered confirmed (and pruned from the pad) once it is
/// buried under this many blocks on the canonical segment.
pub const CONFIRMED_SEGMENT_MINIMUM_LENGTH: i32 = 7;
// `Default` simply delegates to `new()` (an empty scratch pad).
impl Default for ForkScratchPad {
    fn default() -> Self {
        Self::new()
    }
}
impl ForkScratchPad {
/// Creates an empty scratch pad seeded with a single empty canonical fork
/// (fork id 0).
pub fn new() -> ForkScratchPad {
    let mut initial_forks = BTreeMap::new();
    initial_forks.insert(0, ChainSegment::new());
    ForkScratchPad {
        canonical_fork_id: 0,
        orphans: BTreeSet::new(),
        forks: initial_forks,
        headers_store: BTreeMap::new(),
    }
}
/// A header can be processed when the pad is empty (bootstrap case) or when
/// its parent is already known to the pad.
pub fn can_process_header(&self, header: &BlockHeader) -> bool {
    self.headers_store.is_empty()
        || self
            .headers_store
            .contains_key(&header.parent_block_identifier)
}
/// Ingests one block header, updates the fork tree, and returns the resulting
/// chain event, if any.
///
/// Returns `Ok(None)` when the header is a duplicate, an orphan (parent not
/// yet seen), or did not change the canonical fork; `Ok(Some(event))` when the
/// canonical tip advanced (new headers) or moved to another fork (reorg). The
/// `String` error variant is not produced by this method itself.
pub fn process_header(
    &mut self,
    header: BlockHeader,
    ctx: &Context,
) -> Result<Option<BlockchainEvent>, String> {
    ctx.try_log(|logger| slog::info!(logger, "Start processing {}", header.block_identifier));
    // Keep block data in memory
    let entry_exists = self
        .headers_store
        .insert(header.block_identifier.clone(), header.clone());
    // Duplicate submission: the header was already ingested, nothing to do.
    if entry_exists.is_some() {
        ctx.try_log(|logger| {
            slog::warn!(
                logger,
                "Block {} has already been processed",
                header.block_identifier
            )
        });
        return Ok(None);
    }
    for (i, fork) in self.forks.iter() {
        ctx.try_log(|logger| slog::info!(logger, "Active fork {}: {}", i, fork));
    }
    // Retrieve previous canonical fork
    let previous_canonical_fork_id = self.canonical_fork_id;
    let previous_canonical_fork = match self.forks.get(&previous_canonical_fork_id) {
        Some(fork) => fork.clone(),
        None => {
            ctx.try_log(|logger| {
                slog::error!(logger, "unable to retrieve previous bitcoin fork")
            });
            return Ok(None);
        }
    };
    // Try appending the header to each live fork; a collision inside a fork
    // can split it, producing a brand-new fork that becomes the append target.
    let mut fork_updated = None;
    for (_, fork) in self.forks.iter_mut() {
        let (block_appended, mut new_fork) = fork.try_append_block(&header, ctx);
        if block_appended {
            if let Some(new_fork) = new_fork.take() {
                let fork_id = self.forks.len();
                self.forks.insert(fork_id, new_fork);
                fork_updated = self.forks.get_mut(&fork_id);
            } else {
                fork_updated = Some(fork);
            }
            // A block can only be added to one segment
            break;
        }
    }
    let fork_updated = match fork_updated.take() {
        Some(fork) => {
            ctx.try_log(|logger| {
                slog::debug!(
                    logger,
                    "Bitcoin {} successfully appended to {}",
                    header.block_identifier,
                    fork
                )
            });
            fork
        }
        None => {
            // No fork accepted the header (parent unknown): park it as an
            // orphan and retry once future headers fill the gap.
            ctx.try_log(|logger| {
                slog::error!(
                    logger,
                    "Unable to process Bitcoin {} - inboxed for later",
                    header.block_identifier
                )
            });
            self.orphans.insert(header.block_identifier.clone());
            return Ok(None);
        }
    };
    // Process former orphans
    let orphans = self.orphans.clone();
    let mut orphans_to_untrack = HashSet::new();
    let mut at_least_one_orphan_appended = true;
    // As long as we are successful appending blocks that were previously unprocessable,
    // Keep looping on this backlog
    let mut applied = HashSet::new();
    // NOTE(review): `forks_created` is filled below but never inserted into
    // `self.forks` — confirm whether orphan-induced forks should be tracked.
    let mut forks_created = vec![];
    while at_least_one_orphan_appended {
        at_least_one_orphan_appended = false;
        for orphan_block_identifier in orphans.iter() {
            if applied.contains(orphan_block_identifier) {
                continue;
            }
            let block = match self.headers_store.get(orphan_block_identifier) {
                Some(block) => block.clone(),
                None => continue,
            };
            let (orphan_appended, mut new_fork) = fork_updated.try_append_block(&block, ctx);
            if orphan_appended {
                applied.insert(orphan_block_identifier);
                orphans_to_untrack.insert(orphan_block_identifier);
                if let Some(new_fork) = new_fork.take() {
                    forks_created.push(new_fork);
                }
            }
            at_least_one_orphan_appended = at_least_one_orphan_appended || orphan_appended;
        }
    }
    // Update orphans
    for orphan in orphans_to_untrack.into_iter() {
        ctx.try_log(|logger| slog::info!(logger, "Dequeuing orphan {}", orphan));
        self.orphans.remove(orphan);
    }
    // Select canonical fork
    // (`>=` means that on a tie, the highest fork id wins.)
    let mut canonical_fork_id = 0;
    let mut highest_height = 0;
    for (fork_id, fork) in self.forks.iter() {
        ctx.try_log(|logger| slog::info!(logger, "Active fork: {} - {}", fork_id, fork));
        if fork.get_length() >= highest_height {
            highest_height = fork.get_length();
            canonical_fork_id = *fork_id;
        }
    }
    ctx.try_log(|logger| {
        slog::info!(
            logger,
            "Active fork selected as canonical: {}",
            canonical_fork_id
        )
    });
    self.canonical_fork_id = canonical_fork_id;
    // Generate chain event from the previous and current canonical forks
    let canonical_fork = self.forks.get(&canonical_fork_id).unwrap().clone();
    if canonical_fork.eq(&previous_canonical_fork) {
        ctx.try_log(|logger| slog::info!(logger, "Canonical fork unchanged"));
        return Ok(None);
    }
    let res = self.generate_block_chain_event(&canonical_fork, &previous_canonical_fork, ctx);
    let mut chain_event = match res {
        Ok(chain_event) => chain_event,
        // Divergence could not be resolved: restore the previous canonical
        // fork and wait for more headers.
        Err(ChainSegmentIncompatibility::ParentBlockUnknown) => {
            self.canonical_fork_id = previous_canonical_fork_id;
            return Ok(None);
        }
        _ => return Ok(None),
    };
    self.collect_and_prune_confirmed_blocks(&mut chain_event, ctx);
    Ok(Some(chain_event))
}
/// Moves blocks buried deeply enough on the canonical chain into the event's
/// `confirmed_headers`, then prunes them (plus stale orphans and emptied
/// forks) from the pad's in-memory stores.
pub fn collect_and_prune_confirmed_blocks(
    &mut self,
    chain_event: &mut BlockchainEvent,
    ctx: &Context,
) {
    // The confirmation horizon is measured from the tip carried by the event.
    let (tip, confirmed_blocks) = match chain_event {
        BlockchainEvent::BlockchainUpdatedWithHeaders(ref mut event) => {
            match event.new_headers.last() {
                Some(tip) => (tip.block_identifier.clone(), &mut event.confirmed_headers),
                None => return,
            }
        }
        BlockchainEvent::BlockchainUpdatedWithReorg(ref mut event) => {
            match event.headers_to_apply.last() {
                Some(tip) => (tip.block_identifier.clone(), &mut event.confirmed_headers),
                None => return,
            }
        }
    };
    let mut forks_to_prune = vec![];
    let mut ancestor_identifier = &tip;
    // Retrieve the whole canonical segment present in memory, ascending order
    // [1] ... [6] [7]
    let canonical_segment = {
        let mut segment = vec![];
        while let Some(ancestor) = self.headers_store.get(ancestor_identifier) {
            ancestor_identifier = &ancestor.parent_block_identifier;
            segment.push(ancestor.block_identifier.clone());
        }
        segment
    };
    // Not enough history yet: nothing is confirmed.
    if canonical_segment.len() < CONFIRMED_SEGMENT_MINIMUM_LENGTH as usize {
        return;
    }
    // Any block beyond 6th ancestor is considered as confirmed and can be pruned
    // NOTE(review): the `- 2` here and the `[6..]` slices below assume
    // CONFIRMED_SEGMENT_MINIMUM_LENGTH == 7; keep them in sync.
    let cut_off = &canonical_segment[(CONFIRMED_SEGMENT_MINIMUM_LENGTH - 2) as usize];
    // Prune forks using the confirmed block
    let mut blocks_to_prune = vec![];
    for (fork_id, fork) in self.forks.iter_mut() {
        let mut res = fork.prune_confirmed_blocks(cut_off);
        blocks_to_prune.append(&mut res);
        if fork.block_ids.is_empty() {
            forks_to_prune.push(*fork_id);
        }
    }
    // Prune orphans using the confirmed block
    let iter = self.orphans.clone().into_iter();
    for orphan in iter {
        if orphan.index < cut_off.index {
            self.orphans.remove(&orphan);
            blocks_to_prune.push(orphan);
        }
    }
    ctx.try_log(|logger| {
        slog::debug!(
            logger,
            "Removing {} confirmed blocks from block store.",
            canonical_segment[6..].len()
        )
    });
    // Move confirmed headers out of the store and into the event.
    for confirmed_block in canonical_segment[6..].iter() {
        let block = match self.headers_store.remove(confirmed_block) {
            None => {
                ctx.try_log(|logger| {
                    slog::error!(logger, "unable to retrieve data for {}", confirmed_block)
                });
                return;
            }
            Some(block) => block,
        };
        confirmed_blocks.push(block);
    }
    // Prune data
    ctx.try_log(|logger| {
        slog::debug!(
            logger,
            "Pruning {} blocks and {} forks.",
            blocks_to_prune.len(),
            forks_to_prune.len()
        )
    });
    for block_to_prune in blocks_to_prune {
        self.headers_store.remove(&block_to_prune);
    }
    for fork_id in forks_to_prune {
        self.forks.remove(&fork_id);
    }
    // Events list confirmed headers oldest-first.
    confirmed_blocks.reverse();
}
pub fn generate_block_chain_event(
&mut self,
canonical_segment: &ChainSegment,
other_segment: &ChainSegment,
ctx: &Context,
) -> Result<BlockchainEvent, ChainSegmentIncompatibility> {
if other_segment.is_empty() {
let mut new_headers = vec![];
for i in 0..canonical_segment.block_ids.len() {
let block_identifier =
&canonical_segment.block_ids[canonical_segment.block_ids.len() - 1 - i];
let header = match self.headers_store.get(block_identifier) {
Some(block) => block.clone(),
None => {
ctx.try_log(|logger| {
slog::error!(
logger,
"unable to retrieve Bitcoin block {} from block store",
block_identifier
)
});
return Err(ChainSegmentIncompatibility::Unknown);
}
};
new_headers.push(header)
}
return Ok(BlockchainEvent::BlockchainUpdatedWithHeaders(
BlockchainUpdatedWithHeaders {
new_headers,
confirmed_headers: vec![],
},
));
}
if let Ok(divergence) = canonical_segment.try_identify_divergence(other_segment, false, ctx)
{
if divergence.block_ids_to_rollback.is_empty() {
let mut new_headers = vec![];
for i in 0..divergence.block_ids_to_apply.len() {
let block_identifier = &divergence.block_ids_to_apply[i];
let header = match self.headers_store.get(block_identifier) {
Some(header) => header.clone(),
None => {
ctx.try_log(|logger| {
slog::error!(
logger,
"unable to retrieve Bitcoin block {} from block store",
block_identifier
)
});
return Err(ChainSegmentIncompatibility::Unknown);
}
};
new_headers.push(header)
}
return Ok(BlockchainEvent::BlockchainUpdatedWithHeaders(
BlockchainUpdatedWithHeaders {
new_headers,
confirmed_headers: vec![],
},
));
} else {
return Ok(BlockchainEvent::BlockchainUpdatedWithReorg(
BlockchainUpdatedWithReorg {
headers_to_rollback: divergence
.block_ids_to_rollback
.iter()
.map(|block_id| {
let block = match self.headers_store.get(block_id) {
Some(block) => block.clone(),
None => {
ctx.try_log(|logger| {
slog::error!(
logger,
"unable to retrieve Bitcoin block {} from block store",
block_id
)
});
return Err(ChainSegmentIncompatibility::Unknown);
}
};
Ok(block)
})
.collect::<Result<Vec<_>, _>>()?,
headers_to_apply: divergence
.block_ids_to_apply
.iter()
.map(|block_id| {
let block = match self.headers_store.get(block_id) {
Some(block) => block.clone(),
None => {
ctx.try_log(|logger| {
slog::error!(
logger,
"unable to retrieve Bitcoin block {} from block store",
block_id
)
});
return Err(ChainSegmentIncompatibility::Unknown);
}
};
Ok(block)
})
.collect::<Result<Vec<_>, _>>()?,
confirmed_headers: vec![],
},
));
}
}
ctx.try_log(|logger| {
slog::debug!(
logger,
"Unable to infer chain event out of {} and {}",
canonical_segment,
other_segment
)
});
Err(ChainSegmentIncompatibility::ParentBlockUnknown)
}
}

View File

@@ -0,0 +1,321 @@
pub mod bitcoin;
pub mod fork_scratch_pad;
use crate::utils::{AbstractBlock, Context};
use chainhook_types::{
BitcoinBlockSignaling, BitcoinNetwork, BlockHeader, BlockIdentifier, BlockchainEvent,
};
use hiro_system_kit::slog;
use std::collections::VecDeque;
use self::fork_scratch_pad::ForkScratchPad;
/// Cached metadata for an asset class: its ticker symbol and decimal
/// precision.
#[derive(Deserialize, Debug, Clone, Default)]
pub struct AssetClassCache {
    pub symbol: String,
    pub decimals: u8,
}
/// Placeholder for Bitcoin-chain-specific indexing state. Currently empty,
/// but kept as a named type so state can be added later without touching
/// callers.
// `#[derive(Default)]` replaces the previous hand-written `impl Default`
// that just delegated to `new()` — identical behavior, less code.
#[derive(Default)]
pub struct BitcoinChainContext {}

impl BitcoinChainContext {
    /// Creates an empty context.
    pub fn new() -> BitcoinChainContext {
        BitcoinChainContext {}
    }
}
/// Static configuration the indexer needs to reach and interpret a Bitcoin
/// node: the network flavor, the bitcoind RPC endpoint with its credentials,
/// and the block-signaling mechanism.
#[derive(Debug, Clone, PartialEq)]
pub struct IndexerConfig {
    pub bitcoin_network: BitcoinNetwork,
    pub bitcoind_rpc_url: String,
    pub bitcoind_rpc_username: String,
    pub bitcoind_rpc_password: String,
    pub bitcoin_block_signaling: BitcoinBlockSignaling,
}
/// Top-level Bitcoin indexer state: its configuration, the fork scratch pad
/// that tracks competing chain tips, and chain-specific context.
pub struct Indexer {
    pub config: IndexerConfig,
    // Private: callers interact with the pool through `handle_bitcoin_header`.
    bitcoin_blocks_pool: ForkScratchPad,
    pub bitcoin_context: BitcoinChainContext,
}
impl Indexer {
    /// Builds an indexer with a fresh fork scratch pad and chain context.
    pub fn new(config: IndexerConfig) -> Indexer {
        Indexer {
            config,
            bitcoin_blocks_pool: ForkScratchPad::new(),
            bitcoin_context: BitcoinChainContext::new(),
        }
    }

    /// Feeds one Bitcoin header into the fork scratch pad, returning the
    /// resulting chain event when the canonical tip changed.
    pub fn handle_bitcoin_header(
        &mut self,
        header: BlockHeader,
        ctx: &Context,
    ) -> Result<Option<BlockchainEvent>, String> {
        self.bitcoin_blocks_pool.process_header(header, ctx)
    }
}
/// A contiguous run of block identifiers forming one candidate chain tip.
/// Ordered newest-first: the front of the deque is the segment's tip.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ChainSegment {
    pub block_ids: VecDeque<BlockIdentifier>,
}
/// Reasons a block cannot be appended to a `ChainSegment` as-is.
#[derive(Clone, Debug)]
pub enum ChainSegmentIncompatibility {
    // Block is older than the segment tracks — not produced in this file; confirm usage.
    OutdatedBlock,
    // The segment trails the incoming block by more than one height.
    OutdatedSegment,
    // Another block already occupies the same height in the segment.
    BlockCollision,
    // The block's parent does not match the segment tip.
    ParentBlockUnknown,
    // The exact same block is already present in the segment.
    AlreadyPresent,
    // No specific cause could be determined.
    Unknown,
    // Lookup for a block id failed — not produced in this file; confirm usage.
    BlockNotFound,
}
/// Result of comparing two chain segments: the block ids to roll back from
/// the old segment and the ids to apply from the new one (oldest-first).
#[derive(Debug)]
pub struct ChainSegmentDivergence {
    block_ids_to_apply: Vec<BlockIdentifier>,
    block_ids_to_rollback: Vec<BlockIdentifier>,
}
// `Default` delegates to `new()` (an empty segment).
impl Default for ChainSegment {
    fn default() -> Self {
        Self::new()
    }
}
impl ChainSegment {
/// Creates an empty segment.
pub fn new() -> ChainSegment {
    ChainSegment {
        block_ids: VecDeque::new(),
    }
}
/// True when the segment tracks no blocks.
fn is_empty(&self) -> bool {
    self.block_ids.is_empty()
}
/// True when `block_identifier` sits more than one height above this
/// segment's tip, i.e. it cannot be a direct child of the tip.
fn is_block_id_newer_than_segment(&self, block_identifier: &BlockIdentifier) -> bool {
    match self.block_ids.front() {
        Some(tip) => block_identifier.index > (tip.index + 1),
        None => false,
    }
}
/// Position of `block_identifier` inside the deque, derived from the tip's
/// height (0 for an empty segment).
fn get_relative_index(&self, block_identifier: &BlockIdentifier) -> usize {
    self.block_ids
        .front()
        .map(|tip| {
            tip.index
                .saturating_sub(block_identifier.index)
                .try_into()
                .unwrap()
        })
        .unwrap_or(0)
}
/// Checks whether `block` can extend this segment's tip.
///
/// `Ok(())` means the block is appendable (empty segment, or a direct child
/// of the tip); otherwise the specific incompatibility is returned so the
/// caller can decide whether to fork, orphan, or drop the block.
fn can_append_block(
    &self,
    block: &dyn AbstractBlock,
    ctx: &Context,
) -> Result<(), ChainSegmentIncompatibility> {
    if self.is_block_id_newer_than_segment(block.get_identifier()) {
        // Chain segment looks outdated, we should just prune it?
        return Err(ChainSegmentIncompatibility::OutdatedSegment);
    }
    let tip = match self.block_ids.front() {
        Some(tip) => tip,
        None => return Ok(()),
    };
    ctx.try_log(|logger| {
        slog::info!(logger, "Comparing {} with {}", tip, block.get_identifier())
    });
    // Direct child of the tip: parent height matches, and the parent hash
    // must match the tip's hash.
    if tip.index == block.get_parent_identifier().index {
        match tip.hash == block.get_parent_identifier().hash {
            true => return Ok(()),
            false => return Err(ChainSegmentIncompatibility::ParentBlockUnknown),
        }
    }
    // A block already sits at this height: either an exact duplicate or a
    // collision that will force a fork.
    if let Some(colliding_block) = self.get_block_id(block.get_identifier(), ctx) {
        match colliding_block.eq(block.get_identifier()) {
            true => return Err(ChainSegmentIncompatibility::AlreadyPresent),
            false => return Err(ChainSegmentIncompatibility::BlockCollision),
        }
    }
    Err(ChainSegmentIncompatibility::Unknown)
}
/// Looks up the stored identifier at the same height as `block_id`, if this
/// segment covers that height.
fn get_block_id(&self, block_id: &BlockIdentifier, _ctx: &Context) -> Option<&BlockIdentifier> {
    // `match { Some(x) => Some(x), None => None }` collapses to the lookup itself
    // (clippy: manual_map).
    self.block_ids.get(self.get_relative_index(block_id))
}
/// Pushes `block_identifier` as the segment's new tip.
pub fn append_block_identifier(&mut self, block_identifier: &BlockIdentifier) {
    self.block_ids.push_front(block_identifier.clone());
}
/// Removes every block strictly below `cut_off` from the segment and returns
/// the pruned identifiers; retained blocks keep their original order.
pub fn prune_confirmed_blocks(&mut self, cut_off: &BlockIdentifier) -> Vec<BlockIdentifier> {
    // `partition` preserves the relative order of both halves, so extending
    // the deque with `keep` reproduces the original sequence exactly.
    let (keep, prune): (Vec<_>, Vec<_>) = self
        .block_ids
        .drain(..)
        .partition(|block_id| block_id.index >= cut_off.index);
    self.block_ids.extend(keep);
    prune
}
/// Returns the segment's tip (its most recent block).
///
/// Panics if the segment is empty.
pub fn get_tip(&self) -> &BlockIdentifier {
    self.block_ids.front().unwrap()
}
/// Number of blocks tracked by this segment.
pub fn get_length(&self) -> u64 {
    u64::try_from(self.block_ids.len()).unwrap()
}
/// Pops blocks from the tip until `block_identifier` becomes the tip, keeping
/// everything from the oldest block up to (and including) it.
///
/// Returns `(found, mutated)`: `found` is true when the identifier was
/// present in the segment, `mutated` is true when at least one block was
/// removed before finding it (or exhausting the segment).
pub fn keep_blocks_from_oldest_to_block_identifier(
    &mut self,
    block_identifier: &BlockIdentifier,
) -> (bool, bool) {
    let mut mutated = false;
    loop {
        match self.block_ids.pop_front() {
            Some(tip) => {
                if tip.eq(block_identifier) {
                    // Found it: restore it as the new tip and stop.
                    self.block_ids.push_front(tip);
                    break (true, mutated);
                }
            }
            _ => break (false, mutated),
        }
        mutated = true;
    }
}
/// Compares this segment (the new canonical candidate) against
/// `other_segment` (the previous canonical one) and computes which block ids
/// must be rolled back and which must be applied to move between them.
///
/// With `allow_reset`, a pair of segments with no shared history is still
/// accepted (full reset); otherwise it yields `Unknown`.
///
/// NOTE(review): `cursor_segment_1` walks `other_segment` while
/// `cursor_segment_2` walks `self` — the numbering does not follow the
/// argument order; confirm before renaming.
fn try_identify_divergence(
    &self,
    other_segment: &ChainSegment,
    allow_reset: bool,
    ctx: &Context,
) -> Result<ChainSegmentDivergence, ChainSegmentIncompatibility> {
    let mut common_root = None;
    let mut block_ids_to_rollback = vec![];
    let mut block_ids_to_apply = vec![];
    // Walk the previous segment tip-first; for each of its blocks, scan the
    // new segment for a match. Blocks of `self` above the match get applied,
    // blocks of `other_segment` above it get rolled back.
    for cursor_segment_1 in other_segment.block_ids.iter() {
        block_ids_to_apply.clear();
        for cursor_segment_2 in self.block_ids.iter() {
            if cursor_segment_2.eq(cursor_segment_1) {
                common_root = Some(cursor_segment_2.clone());
                break;
            }
            block_ids_to_apply.push(cursor_segment_2.clone());
        }
        if common_root.is_some() {
            break;
        }
        block_ids_to_rollback.push(cursor_segment_1.clone());
    }
    ctx.try_log(|logger| {
        slog::debug!(logger, "Blocks to rollback: {:?}", block_ids_to_rollback)
    });
    ctx.try_log(|logger| slog::debug!(logger, "Blocks to apply: {:?}", block_ids_to_apply));
    // The apply list was collected tip-first; consumers expect oldest-first.
    block_ids_to_apply.reverse();
    match common_root.take() {
        Some(_common_root) => Ok(ChainSegmentDivergence {
            block_ids_to_rollback,
            block_ids_to_apply,
        }),
        None if allow_reset => Ok(ChainSegmentDivergence {
            block_ids_to_rollback,
            block_ids_to_apply,
        }),
        None => Err(ChainSegmentIncompatibility::Unknown),
    }
}
/// Attempts to append `block` to this segment.
///
/// Returns `(true, None)` when the block extended the segment in place,
/// `(true, Some(fork))` when a collision forced the segment to fork (the new
/// fork contains the block), and `(false, None)` when the block could not be
/// attached at all.
fn try_append_block(
    &mut self,
    block: &dyn AbstractBlock,
    ctx: &Context,
) -> (bool, Option<ChainSegment>) {
    let mut block_appended = false;
    let mut fork = None;
    ctx.try_log(|logger| {
        slog::info!(
            logger,
            "Trying to append {} to {}",
            block.get_identifier(),
            self
        )
    });
    match self.can_append_block(block, ctx) {
        Ok(()) => {
            // Happy path: the block is a direct child of the current tip.
            self.append_block_identifier(block.get_identifier());
            block_appended = true;
        }
        Err(incompatibility) => {
            ctx.try_log(|logger| {
                slog::warn!(logger, "Will have to fork: {:?}", incompatibility)
            });
            match incompatibility {
                ChainSegmentIncompatibility::BlockCollision => {
                    // Same height occupied by a different block: clone the
                    // segment up to the block's parent and fork from there.
                    let mut new_fork = self.clone();
                    let (parent_found, _) = new_fork
                        .keep_blocks_from_oldest_to_block_identifier(
                            block.get_parent_identifier(),
                        );
                    if parent_found {
                        ctx.try_log(|logger| slog::info!(logger, "Success"));
                        new_fork.append_block_identifier(block.get_identifier());
                        fork = Some(new_fork);
                        block_appended = true;
                    }
                }
                ChainSegmentIncompatibility::OutdatedSegment => {
                    // TODO(lgalabru): test depth
                    // fork_ids_to_prune.push(fork_id);
                }
                // The remaining incompatibilities leave the segment untouched.
                ChainSegmentIncompatibility::ParentBlockUnknown => {}
                ChainSegmentIncompatibility::OutdatedBlock => {}
                ChainSegmentIncompatibility::Unknown => {}
                ChainSegmentIncompatibility::AlreadyPresent => {}
                ChainSegmentIncompatibility::BlockNotFound => {}
            }
        }
    }
    (block_appended, fork)
}
}
impl std::fmt::Display for ChainSegment {
    /// Renders the segment as `Fork [tip, ..., oldest], height = N`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let mut rendered_ids = Vec::with_capacity(self.block_ids.len());
        for block_id in self.block_ids.iter() {
            rendered_ids.push(block_id.to_string());
        }
        write!(
            f,
            "Fork [{}], height = {}",
            rendered_ids.join(", "),
            self.get_length()
        )
    }
}
#[cfg(test)]
pub mod tests;

View File

@@ -0,0 +1,79 @@
// Hard-coded Stacks (STX) addresses of the standard devnet accounts, used
// as test fixtures.
pub fn deployer_stx_address() -> String {
    String::from("ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM")
}
pub fn wallet_1_stx_address() -> String {
    String::from("ST1SJ3DTE5DN7X54YDH5D64R3BCB6A2AG2ZQ8YPD5")
}
pub fn wallet_2_stx_address() -> String {
    String::from("ST2CY5V39NHDPWSXMW9QDT3HC3GD6Q6XX4CFRK9AG")
}
pub fn wallet_3_stx_address() -> String {
    String::from("ST2JHG361ZXG51QTKY2NQCVBPPRRE2KZB1HR05NNC")
}
pub fn wallet_4_stx_address() -> String {
    String::from("ST2NEB84ASENDXKYGJPQW86YXQCEFEX2ZQPG87ND")
}
pub fn wallet_5_stx_address() -> String {
    String::from("ST2REHHS5J3CERCRBEPMGH7921Q6PYKAADT7JP2VB")
}
pub fn wallet_6_stx_address() -> String {
    String::from("ST3AM1A56AK2C1XAFJ4115ZSV26EB49BVQ10MGCS0")
}
pub fn wallet_7_stx_address() -> String {
    String::from("ST3PF13W7Z0RRM42A8VZRVFQ75SV1K26RXEP8YGKJ")
}
pub fn wallet_8_stx_address() -> String {
    String::from("ST3NBRSFKX28FQ2ZJ1MAKX58HKHSDGNV5N7R21XCP")
}
pub fn wallet_9_stx_address() -> String {
    String::from("STNHKEPYEPJ8ET55ZZ0M5A34J0R3N5FM2CMMMAZ6")
}
// Hard-coded Bitcoin (regtest/testnet-style) addresses of the standard
// devnet accounts, used as test fixtures.
pub fn deployer_btc_address() -> String {
    String::from("mqVnk6NPRdhntvfm4hh9vvjiRkFDUuSYsH")
}
pub fn wallet_1_btc_address() -> String {
    String::from("mr1iPkD9N3RJZZxXRk7xF9d36gffa6exNC")
}
pub fn wallet_2_btc_address() -> String {
    String::from("muYdXKmX9bByAueDe6KFfHd5Ff1gdN9ErG")
}
pub fn wallet_3_btc_address() -> String {
    String::from("mvZtbibDAAA3WLpY7zXXFqRa3T4XSknBX7")
}
pub fn wallet_4_btc_address() -> String {
    String::from("mg1C76bNTutiCDV3t9nWhZs3Dc8LzUufj8")
}
pub fn wallet_5_btc_address() -> String {
    String::from("mweN5WVqadScHdA81aATSdcVr4B6dNokqx")
}
pub fn wallet_6_btc_address() -> String {
    String::from("mzxXgV6e4BZSsz8zVHm3TmqbECt7mbuErt")
}
pub fn wallet_7_btc_address() -> String {
    String::from("n37mwmru2oaVosgfuvzBwgV2ysCQRrLko7")
}
pub fn wallet_8_btc_address() -> String {
    String::from("n2v875jbJ4RjBnTjgbfikDfnwsDV5iUByw")
}
pub fn wallet_9_btc_address() -> String {
    String::from("mjSrB3wS4xab3kYqFktwBzfTdPg367ZJ2d")
}

View File

@@ -0,0 +1,148 @@
use chainhook_types::{
BitcoinBlockData, BitcoinBlockMetadata, BitcoinTransactionData, BlockIdentifier,
};
pub fn generate_test_bitcoin_block(
fork_id: u8,
block_height: u64,
transactions: Vec<BitcoinTransactionData>,
parent: Option<BitcoinBlockData>,
) -> BitcoinBlockData {
let mut hash = vec![
fork_id, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
let parent_height = match block_height {
0 => 0,
_ => block_height - 1,
};
let parent_block_identifier = match parent {
Some(parent) => {
assert_eq!(parent.block_identifier.index, parent_height);
parent.block_identifier.clone()
}
_ => {
let mut parent_hash = if parent_height == 1 {
vec![
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]
} else {
vec![
fork_id, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]
};
parent_hash.append(&mut parent_height.to_be_bytes().to_vec());
BlockIdentifier {
index: parent_height,
hash: format!("0x{}", hex::encode(&parent_hash[..])),
}
}
};
hash.append(&mut block_height.to_be_bytes().to_vec());
BitcoinBlockData {
block_identifier: BlockIdentifier {
index: block_height,
hash: format!("0x{}", hex::encode(&hash[..])),
},
parent_block_identifier,
timestamp: 0,
transactions,
metadata: BitcoinBlockMetadata {
network: chainhook_types::BitcoinNetwork::Regtest,
},
}
}
// Shorthand fixtures: each constructor `XN` builds the block at height N on
// a given fork (A* = fork 0, B1/C1/... = fork 1, B2/... = fork 2,
// B3/... = fork 3), optionally chained to an explicit parent block.
pub fn A1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(0, 1, vec![], parent)
}
pub fn B1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 2, vec![], parent)
}
pub fn B2(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(2, 2, vec![], parent)
}
pub fn C1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 3, vec![], parent)
}
pub fn C2(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(2, 3, vec![], parent)
}
pub fn D1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 4, vec![], parent)
}
pub fn D2(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(2, 4, vec![], parent)
}
pub fn E1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 5, vec![], parent)
}
pub fn E2(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(2, 5, vec![], parent)
}
pub fn B3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 2, vec![], parent)
}
pub fn C3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 3, vec![], parent)
}
pub fn D3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 4, vec![], parent)
}
pub fn E3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 5, vec![], parent)
}
pub fn F1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 6, vec![], parent)
}
pub fn F2(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(2, 6, vec![], parent)
}
pub fn F3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 6, vec![], parent)
}
pub fn G1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 7, vec![], parent)
}
pub fn G2(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(2, 7, vec![], parent)
}
pub fn G3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 7, vec![], parent)
}
pub fn H1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 8, vec![], parent)
}
pub fn H3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 8, vec![], parent)
}
pub fn I1(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(1, 9, vec![], parent)
}
pub fn I3(parent: Option<BitcoinBlockData>) -> BitcoinBlockData {
    generate_test_bitcoin_block(3, 9, vec![], parent)
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,5 @@
pub mod accounts;
#[allow(non_snake_case, unreachable_code)]
pub mod bitcoin_blocks;
pub mod bitcoin_shapes;
pub mod transactions;

View File

@@ -0,0 +1,70 @@
use base58::FromBase58;
use bitcoincore_rpc::bitcoin::blockdata::opcodes;
use bitcoincore_rpc::bitcoin::blockdata::script::Builder as BitcoinScriptBuilder;
use chainhook_types::{bitcoin::TxOut, BitcoinTransactionData, BitcoinTransactionMetadata, TransactionIdentifier};
/// Builds a synthetic P2PKH transfer transaction for tests.
///
/// The transaction id embeds the big-endian `txid` counter in the last 8
/// bytes of an otherwise zeroed 32-byte hash. The single output pays
/// `amount` to a standard `OP_DUP OP_HASH160 <pkh> OP_EQUALVERIFY
/// OP_CHECKSIG` script derived from `recipient`.
///
/// # Panics
/// Panics when `recipient` is not valid base58 or decodes to fewer than
/// 21 bytes (1 version byte + 20-byte pubkey hash).
pub fn generate_test_tx_bitcoin_p2pkh_transfer(
    txid: u64,
    _sender: &str,
    recipient: &str,
    amount: u64,
) -> BitcoinTransactionData {
    let mut hash = vec![
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    ];
    hash.append(&mut txid.to_be_bytes().to_vec());
    // Preparing metadata
    let pubkey_hash = recipient
        .from_base58()
        .expect("Unable to get bytes from btc address");
    // Bytes 1..21 are the 20-byte pubkey hash (byte 0 is the version
    // prefix). Replaces the previous 20-element hand-written array copy.
    let slice: [u8; 20] = std::convert::TryFrom::try_from(&pubkey_hash[1..21])
        .expect("btc address payload shorter than 21 bytes");
    let script = BitcoinScriptBuilder::new()
        .push_opcode(opcodes::all::OP_DUP)
        .push_opcode(opcodes::all::OP_HASH160)
        .push_slice(slice)
        .push_opcode(opcodes::all::OP_EQUALVERIFY)
        .push_opcode(opcodes::all::OP_CHECKSIG)
        .into_script();
    let outputs = vec![TxOut {
        value: amount,
        script_pubkey: format!("0x{}", hex::encode(script.as_bytes())),
    }];
    BitcoinTransactionData {
        transaction_identifier: TransactionIdentifier {
            hash: format!("0x{}", hex::encode(&hash[..])),
        },
        operations: vec![],
        metadata: BitcoinTransactionMetadata {
            inputs: vec![],
            outputs,
            ordinal_operations: vec![],
            brc20_operation: None,
            proof: None,
            fee: 0,
            index: 0,
        },
    }
}

View File

@@ -0,0 +1,19 @@
pub mod helpers;
use crate::utils::{AbstractBlock, Context};
use super::fork_scratch_pad::ForkScratchPad;
use chainhook_types::{BitcoinBlockData, BlockchainEvent};
/// Callback asserting expectations about the event produced by one step.
pub type BlockchainEventExpectation = Box<dyn Fn(Option<BlockchainEvent>)>;

/// Feeds each block's header into a fresh `ForkScratchPad` and invokes the
/// paired expectation callback with the resulting chain event (if any).
///
/// Panics (via `unwrap`) when header processing itself fails, so a broken
/// scenario surfaces immediately in tests.
pub fn process_bitcoin_blocks_and_check_expectations(
    steps: Vec<(BitcoinBlockData, BlockchainEventExpectation)>,
) {
    let mut blocks_processor = ForkScratchPad::new();
    for (block, check_chain_event_expectations) in steps.into_iter() {
        let chain_event = blocks_processor
            .process_header(block.get_header(), &Context::empty())
            .unwrap();
        check_chain_event_expectations(chain_event);
    }
}

View File

@@ -0,0 +1,13 @@
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
pub use bitcoincore_rpc;
pub mod indexer;
pub mod observer;
pub mod utils;

View File

@@ -0,0 +1,752 @@
mod zmq;
use crate::indexer::bitcoin::{
build_http_client, download_and_parse_block_with_retry, standardize_bitcoin_block,
BitcoinBlockFullBreakdown,
};
use crate::utils::Context;
use chainhook_types::{
BitcoinBlockData, BitcoinBlockSignaling, BitcoinChainEvent, BitcoinChainUpdatedWithBlocksData,
BitcoinChainUpdatedWithReorgData, BitcoinNetwork, BlockIdentifier, BlockchainEvent,
};
use hiro_system_kit;
use hiro_system_kit::slog;
use rocket::serde::Deserialize;
use rocket::Shutdown;
use std::collections::HashMap;
use std::error::Error;
use std::str;
use std::sync::mpsc::{Receiver, Sender};
#[derive(Deserialize)]
/// Payload describing a newly observed transaction.
// NOTE(review): not constructed anywhere in this chunk — presumably
// deserialized from an external feed; confirm against callers.
pub struct NewTransaction {
    pub txid: String,
    pub status: String,
    pub raw_result: String,
    pub raw_tx: String,
}
#[derive(Clone, Debug)]
/// Events emitted by the observer.
pub enum Event {
    /// A change in the Bitcoin chain (new blocks or a reorg).
    BitcoinChainEvent(BitcoinChainEvent),
}

#[derive(Debug, Clone)]
/// Connection and network settings for the observed bitcoind node.
pub struct EventObserverConfig {
    pub bitcoind_rpc_username: String,
    pub bitcoind_rpc_password: String,
    /// RPC endpoint of the bitcoind node.
    pub bitcoind_rpc_url: String,
    /// How new blocks are announced to the observer (ZeroMQ).
    pub bitcoin_block_signaling: BitcoinBlockSignaling,
    pub bitcoin_network: BitcoinNetwork,
}
/// A builder that is used to create a general purpose [EventObserverConfig].
///
/// ## Examples
/// ```
/// use chainhook_sdk::observer::EventObserverConfig;
/// use chainhook_sdk::observer::EventObserverConfigBuilder;
///
/// fn get_config() -> Result<EventObserverConfig, String> {
///     EventObserverConfigBuilder::new()
///         .bitcoind_rpc_password("my_password")
///         .bitcoin_network("mainnet")
///         .finish()
/// }
/// ```
#[derive(Deserialize, Debug, Clone)]
pub struct EventObserverConfigBuilder {
    // Every field is an optional override; `None` falls back to the default
    // applied by `EventObserverConfig::new_using_overrides`.
    pub bitcoind_rpc_username: Option<String>,
    pub bitcoind_rpc_password: Option<String>,
    pub bitcoind_rpc_url: Option<String>,
    pub bitcoind_zmq_url: Option<String>,
    pub bitcoin_network: Option<String>,
}
impl Default for EventObserverConfigBuilder {
    // Delegates to `new()` so `Default` and the explicit constructor can
    // never drift apart.
    fn default() -> Self {
        Self::new()
    }
}
impl EventObserverConfigBuilder {
    /// Creates a builder with every override unset.
    pub fn new() -> Self {
        EventObserverConfigBuilder {
            bitcoind_rpc_username: None,
            bitcoind_rpc_password: None,
            bitcoind_rpc_url: None,
            bitcoind_zmq_url: None,
            bitcoin_network: None,
        }
    }

    /// Sets the bitcoind node's RPC username.
    pub fn bitcoind_rpc_username(&mut self, username: &str) -> &mut Self {
        self.bitcoind_rpc_username = Some(username.to_owned());
        self
    }

    /// Sets the bitcoind node's RPC password.
    pub fn bitcoind_rpc_password(&mut self, password: &str) -> &mut Self {
        self.bitcoind_rpc_password = Some(password.to_owned());
        self
    }

    /// Sets the bitcoind node's RPC url.
    pub fn bitcoind_rpc_url(&mut self, url: &str) -> &mut Self {
        self.bitcoind_rpc_url = Some(url.to_owned());
        self
    }

    /// Sets the bitcoind node's ZMQ url, used by the observer to receive new
    /// block events from bitcoind.
    pub fn bitcoind_zmq_url(&mut self, url: &str) -> &mut Self {
        self.bitcoind_zmq_url = Some(url.to_owned());
        self
    }

    /// Sets the Bitcoin network. Must be a valid bitcoin network string
    /// according to [BitcoinNetwork::from_str].
    pub fn bitcoin_network(&mut self, network: &str) -> &mut Self {
        self.bitcoin_network = Some(network.to_owned());
        self
    }

    /// Converts this builder into an [EventObserverConfig], filling unset
    /// fields with defaults via [EventObserverConfig::new_using_overrides].
    ///
    /// # Errors
    /// Fails when `bitcoin_network` is set to a string that is not a valid
    /// [BitcoinNetwork].
    pub fn finish(&self) -> Result<EventObserverConfig, String> {
        EventObserverConfig::new_using_overrides(Some(self))
    }
}
impl Default for EventObserverConfig {
    /// Regtest-oriented defaults matching a local bitcoind with RPC on
    /// port 18443 and ZMQ block signaling on port 18543.
    ///
    /// Note: this was previously an inherent `pub fn default()`; it is now
    /// a proper `Default` impl (clippy `should_implement_trait`). Existing
    /// `EventObserverConfig::default()` call sites keep working since
    /// `Default` is in the prelude.
    fn default() -> Self {
        EventObserverConfig {
            bitcoind_rpc_username: "devnet".into(),
            bitcoind_rpc_password: "devnet".into(),
            bitcoind_rpc_url: "http://localhost:18443".into(),
            bitcoin_block_signaling: BitcoinBlockSignaling::ZeroMQ(
                "tcp://localhost:18543".to_string(),
            ),
            bitcoin_network: BitcoinNetwork::Regtest,
        }
    }
}

impl EventObserverConfig {
    /// Projects this configuration into the subset needed to talk to
    /// bitcoind.
    pub fn get_bitcoin_config(&self) -> BitcoinConfig {
        BitcoinConfig {
            username: self.bitcoind_rpc_username.clone(),
            password: self.bitcoind_rpc_password.clone(),
            rpc_url: self.bitcoind_rpc_url.clone(),
            network: self.bitcoin_network.clone(),
            bitcoin_block_signaling: self.bitcoin_block_signaling.clone(),
        }
    }

    /// Helper to allow overriding some default fields in creating a new EventObserverConfig.
    ///
    /// *Note: This is used by external crates, so it should not be removed, even if not used internally by Chainhook.*
    ///
    /// # Errors
    /// Fails when the override's `bitcoin_network` string is not a valid
    /// [BitcoinNetwork].
    pub fn new_using_overrides(
        overrides: Option<&EventObserverConfigBuilder>,
    ) -> Result<EventObserverConfig, String> {
        let bitcoin_network =
            if let Some(network) = overrides.and_then(|c| c.bitcoin_network.as_ref()) {
                BitcoinNetwork::from_str(network)?
            } else {
                BitcoinNetwork::Regtest
            };
        let config = EventObserverConfig {
            bitcoind_rpc_username: overrides
                .and_then(|c| c.bitcoind_rpc_username.clone())
                .unwrap_or_else(|| "devnet".to_string()),
            bitcoind_rpc_password: overrides
                .and_then(|c| c.bitcoind_rpc_password.clone())
                .unwrap_or_else(|| "devnet".to_string()),
            bitcoind_rpc_url: overrides
                .and_then(|c| c.bitcoind_rpc_url.clone())
                .unwrap_or_else(|| "http://localhost:18443".to_string()),
            bitcoin_block_signaling: overrides
                .and_then(|c| c.bitcoind_zmq_url.as_ref())
                .map(|url| BitcoinBlockSignaling::ZeroMQ(url.clone()))
                .unwrap_or_else(|| {
                    BitcoinBlockSignaling::ZeroMQ("tcp://localhost:18543".to_string())
                }),
            bitcoin_network,
        };
        Ok(config)
    }
}
#[derive(Clone, Debug, PartialEq)]
/// Commands consumed by the observer command loop.
pub enum ObserverCommand {
    /// Standardize and cache a freshly downloaded raw block.
    ProcessBitcoinBlock(BitcoinBlockFullBreakdown),
    /// Cache an already-standardized block.
    CacheBitcoinBlock(BitcoinBlockData),
    /// Build and dispatch a chain event from cached blocks.
    PropagateBitcoinChainEvent(BlockchainEvent),
    /// Stop the command loop.
    Terminate,
}

#[derive(Clone, Debug, PartialEq)]
// NOTE(review): not referenced anywhere in this chunk — possibly a leftover
// from the predicates removal; confirm before deleting.
pub struct HookExpirationData {
    pub hook_uuid: String,
    pub block_height: u64,
}

#[derive(Clone, Debug, PartialEq)]
// NOTE(review): not referenced anywhere in this chunk — confirm usage.
pub struct MempoolAdmissionData {
    pub tx_data: String,
    pub tx_description: String,
}

#[derive(Clone, Debug)]
/// Notifications surfaced to observer subscribers.
pub enum ObserverEvent {
    Error(String),
    Fatal(String),
    Info(String),
    Terminate,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
/// JSONRPC Request
pub struct BitcoinRPCRequest {
    /// The name of the RPC call
    pub method: String,
    /// Parameters to the RPC call
    pub params: serde_json::Value,
    /// Identifier for this Request, which should appear in the response
    pub id: serde_json::Value,
    /// jsonrpc field, MUST be "2.0"
    pub jsonrpc: serde_json::Value,
}

#[derive(Debug, Clone)]
/// Subset of the observer configuration needed to reach bitcoind.
pub struct BitcoinConfig {
    pub username: String,
    pub password: String,
    pub rpc_url: String,
    pub network: BitcoinNetwork,
    pub bitcoin_block_signaling: BitcoinBlockSignaling,
}

#[derive(Debug, Clone)]
/// A standardized block plus a flag recording whether the sidecar has
/// already processed it.
pub struct BitcoinBlockDataCached {
    pub block: BitcoinBlockData,
    pub processed_by_sidecar: bool,
}
/// Hooks allowing an embedding application to participate in block
/// processing.
pub struct ObserverSidecar {
    /// Round-trip channel pair: blocks to mutate (plus identifiers of
    /// blocks being rolled back) are sent on the first channel, mutated
    /// blocks come back on the second.
    pub bitcoin_blocks_mutator: Option<(
        crossbeam_channel::Sender<(Vec<BitcoinBlockDataCached>, Vec<BlockIdentifier>)>,
        crossbeam_channel::Receiver<Vec<BitcoinBlockDataCached>>,
    )>,
    /// Receives one apply/undo notification per block of each chain event.
    pub bitcoin_chain_event_notifier: Option<crossbeam_channel::Sender<HandleBlock>>,
}
impl ObserverSidecar {
fn perform_bitcoin_sidecar_mutations(
&self,
blocks: Vec<BitcoinBlockDataCached>,
blocks_ids_to_rollback: Vec<BlockIdentifier>,
ctx: &Context,
) -> Vec<BitcoinBlockDataCached> {
if let Some(ref block_mutator) = self.bitcoin_blocks_mutator {
ctx.try_log(|logger| slog::info!(logger, "Sending blocks to pre-processor",));
let _ = block_mutator
.0
.send((blocks.clone(), blocks_ids_to_rollback));
ctx.try_log(|logger| slog::info!(logger, "Waiting for blocks from pre-processor",));
match block_mutator.1.recv() {
Ok(updated_blocks) => {
ctx.try_log(|logger| slog::info!(logger, "Block received from pre-processor",));
updated_blocks
}
Err(e) => {
ctx.try_log(|logger| {
slog::error!(
logger,
"Unable to receive block from pre-processor {}",
e.to_string()
)
});
blocks
}
}
} else {
blocks
}
}
fn notify_chain_event(&self, chain_event: &BitcoinChainEvent, _ctx: &Context) {
if let Some(ref notifier) = self.bitcoin_chain_event_notifier {
match chain_event {
BitcoinChainEvent::ChainUpdatedWithBlocks(data) => {
for block in data.new_blocks.iter() {
let _ = notifier.send(HandleBlock::ApplyBlock(block.clone()));
}
}
BitcoinChainEvent::ChainUpdatedWithReorg(data) => {
for block in data.blocks_to_rollback.iter() {
let _ = notifier.send(HandleBlock::UndoBlock(block.clone()));
}
for block in data.blocks_to_apply.iter() {
let _ = notifier.send(HandleBlock::ApplyBlock(block.clone()));
}
}
}
}
}
}
/// A helper struct used to configure and call [start_event_observer], which spawns a thread to observe chain events.
///
/// ### Examples
/// ```
/// use chainhook_sdk::observer::EventObserverBuilder;
/// use chainhook_sdk::observer::EventObserverConfig;
/// use chainhook_sdk::observer::ObserverCommand;
/// use chainhook_sdk::utils::Context;
/// use std::error::Error;
/// use std::sync::mpsc::{Receiver, Sender};
///
/// fn start_event_observer(
///     config: EventObserverConfig,
///     observer_commands_tx: &Sender<ObserverCommand>,
///     observer_commands_rx: Receiver<ObserverCommand>,
///     ctx: &Context,
/// )-> Result<(), Box<dyn Error>> {
///     EventObserverBuilder::new(
///         config,
///         &observer_commands_tx,
///         observer_commands_rx,
///         &ctx
///     )
///     .start()
/// }
/// ```
pub struct EventObserverBuilder {
    // Observer configuration (bitcoind endpoints, network).
    config: EventObserverConfig,
    // Command channel: sender handed out to producers, receiver consumed by
    // the observer loop.
    observer_commands_tx: Sender<ObserverCommand>,
    observer_commands_rx: Receiver<ObserverCommand>,
    ctx: Context,
    // Optional subscriber channel for [ObserverEvent] notifications.
    observer_events_tx: Option<crossbeam_channel::Sender<ObserverEvent>>,
    // Optional sidecar for block mutation / chain-event notifications.
    observer_sidecar: Option<ObserverSidecar>,
}
impl EventObserverBuilder {
    /// Creates a builder with no events channel and no sidecar configured.
    pub fn new(
        config: EventObserverConfig,
        observer_commands_tx: &Sender<ObserverCommand>,
        observer_commands_rx: Receiver<ObserverCommand>,
        ctx: &Context,
    ) -> Self {
        EventObserverBuilder {
            config,
            observer_commands_tx: observer_commands_tx.clone(),
            observer_commands_rx,
            ctx: ctx.clone(),
            observer_events_tx: None,
            observer_sidecar: None,
        }
    }

    /// Sets the `observer_events_tx` Sender. Set this and listen on the corresponding
    /// Receiver to be notified of every [ObserverEvent].
    pub fn events_tx(
        &mut self,
        observer_events_tx: crossbeam_channel::Sender<ObserverEvent>,
    ) -> &mut Self {
        self.observer_events_tx = Some(observer_events_tx);
        self
    }

    /// Sets a sidecar for the observer. See [ObserverSidecar].
    pub fn sidecar(&mut self, sidecar: ObserverSidecar) -> &mut Self {
        self.observer_sidecar = Some(sidecar);
        self
    }

    /// Starts the event observer, calling [start_event_observer]. This function consumes the
    /// [EventObserverBuilder] and spawns a new thread to run the observer.
    pub fn start(self) -> Result<(), Box<dyn Error>> {
        start_event_observer(
            self.config,
            self.observer_commands_tx,
            self.observer_commands_rx,
            self.observer_events_tx,
            self.observer_sidecar,
            self.ctx,
        )
    }
}
/// Spawns a thread to observe blockchain events. Use [EventObserverBuilder] to configure easily.
///
/// # Errors
/// Currently always returns `Ok(())`; failures inside the spawned thread
/// are reported through `observer_events_tx` (as a `Terminate` event).
pub fn start_event_observer(
    config: EventObserverConfig,
    observer_commands_tx: Sender<ObserverCommand>,
    observer_commands_rx: Receiver<ObserverCommand>,
    observer_events_tx: Option<crossbeam_channel::Sender<ObserverEvent>>,
    observer_sidecar: Option<ObserverSidecar>,
    ctx: Context,
) -> Result<(), Box<dyn Error>> {
    match config.bitcoin_block_signaling {
        BitcoinBlockSignaling::ZeroMQ(ref url) => {
            ctx.try_log(|logger| {
                slog::info!(logger, "Observing Bitcoin chain events via ZeroMQ: {}", url)
            });
            // Clones moved into the observer thread below.
            let context_cloned = ctx.clone();
            let event_observer_config_moved = config.clone();
            let observer_commands_tx_moved = observer_commands_tx.clone();
            let _ = hiro_system_kit::thread_named("Chainhook event observer")
                .spawn(move || {
                    let future = start_bitcoin_event_observer(
                        event_observer_config_moved,
                        observer_commands_tx_moved,
                        observer_commands_rx,
                        observer_events_tx.clone(),
                        observer_sidecar,
                        context_cloned.clone(),
                    );
                    match hiro_system_kit::nestable_block_on(future) {
                        Ok(_) => {}
                        Err(e) => {
                            // Surface fatal observer errors to subscribers,
                            // then signal termination.
                            if let Some(tx) = observer_events_tx {
                                context_cloned.try_log(|logger| {
                                    slog::crit!(
                                        logger,
                                        "Chainhook event observer thread failed with error: {e}",
                                    )
                                });
                                let _ = tx.send(ObserverEvent::Terminate);
                            }
                        }
                    }
                })
                .expect("unable to spawn thread");
        }
    }
    Ok(())
}
/// Runs the Bitcoin observer: spawns a ZMQ listener thread that feeds
/// [ObserverCommand]s into the command channel, then runs the command
/// handler loop on the current task until termination.
pub async fn start_bitcoin_event_observer(
    config: EventObserverConfig,
    _observer_commands_tx: Sender<ObserverCommand>,
    observer_commands_rx: Receiver<ObserverCommand>,
    observer_events_tx: Option<crossbeam_channel::Sender<ObserverEvent>>,
    observer_sidecar: Option<ObserverSidecar>,
    ctx: Context,
) -> Result<(), Box<dyn Error>> {
    let ctx_moved = ctx.clone();
    let config_moved = config.clone();
    // NOTE(review): the spawn result and join handle are discarded here; a
    // failure to start the ZMQ thread would go unnoticed — confirm intended.
    let _ = hiro_system_kit::thread_named("ZMQ handler").spawn(move || {
        let future = zmq::start_zeromq_runloop(&config_moved, _observer_commands_tx, &ctx_moved);
        hiro_system_kit::nestable_block_on(future);
    });
    // This loop is used for handling background jobs, emitted by HTTP calls.
    start_observer_commands_handler(
        config,
        observer_commands_rx,
        observer_events_tx,
        None,
        observer_sidecar,
        ctx,
    )
    .await
}

/// Per-block notification sent to the sidecar when a chain event applies
/// or undoes a block.
pub enum HandleBlock {
    ApplyBlock(BitcoinBlockData),
    UndoBlock(BitcoinBlockData),
}
/// Main command loop of the observer.
///
/// Consumes [ObserverCommand]s from `observer_commands_rx` until the
/// channel breaks or a `Terminate` command arrives: standardizes and
/// caches incoming bitcoin blocks, and turns blockchain header events into
/// full [BitcoinChainEvent]s, optionally letting the configured
/// [ObserverSidecar] mutate blocks and observe chain events along the way.
pub async fn start_observer_commands_handler(
    config: EventObserverConfig,
    observer_commands_rx: Receiver<ObserverCommand>,
    observer_events_tx: Option<crossbeam_channel::Sender<ObserverEvent>>,
    ingestion_shutdown: Option<Shutdown>,
    observer_sidecar: Option<ObserverSidecar>,
    ctx: Context,
) -> Result<(), Box<dyn Error>> {
    // Cache of standardized blocks keyed by identifier; chain events are
    // assembled from this store.
    let mut bitcoin_block_store: HashMap<BlockIdentifier, BitcoinBlockDataCached> = HashMap::new();
    let http_client = build_http_client();
    // Blocks only need to round-trip through the sidecar when a block
    // mutator is actually configured.
    let store_update_required = observer_sidecar
        .as_ref()
        .and_then(|s| s.bitcoin_blocks_mutator.as_ref())
        .is_some();
    loop {
        let command = match observer_commands_rx.recv() {
            Ok(cmd) => cmd,
            Err(e) => {
                // All senders dropped: nothing more will arrive, shut down.
                ctx.try_log(|logger| {
                    slog::crit!(logger, "Error: broken channel {}", e.to_string())
                });
                break;
            }
        };
        match command {
            ObserverCommand::Terminate => {
                break;
            }
            ObserverCommand::ProcessBitcoinBlock(mut block_data) => {
                let block_hash = block_data.hash.to_string();
                let mut attempts = 0;
                let max_attempts = 10;
                // Standardize the raw block; on failure, optionally re-download
                // it from bitcoind and retry up to `max_attempts` times.
                let block = loop {
                    match standardize_bitcoin_block(
                        block_data.clone(),
                        &config.bitcoin_network,
                        &ctx,
                    ) {
                        Ok(block) => break Some(block),
                        Err((e, refetch_block)) => {
                            attempts += 1;
                            if attempts > max_attempts {
                                break None;
                            }
                            ctx.try_log(|logger| {
                                slog::warn!(logger, "Error standardizing block: {}", e)
                            });
                            if refetch_block {
                                block_data = match download_and_parse_block_with_retry(
                                    &http_client,
                                    &block_hash,
                                    &config.get_bitcoin_config(),
                                    &ctx,
                                )
                                .await
                                {
                                    Ok(block) => block,
                                    Err(e) => {
                                        ctx.try_log(|logger| {
                                            slog::warn!(
                                                logger,
                                                "unable to download_and_parse_block: {}",
                                                e.to_string()
                                            )
                                        });
                                        continue;
                                    }
                                };
                            }
                        }
                    };
                };
                let Some(block) = block else {
                    ctx.try_log(|logger| {
                        slog::crit!(
                            logger,
                            "Could not process bitcoin block after {} attempts.",
                            attempts
                        )
                    });
                    break;
                };
                bitcoin_block_store.insert(
                    block.block_identifier.clone(),
                    BitcoinBlockDataCached {
                        block,
                        processed_by_sidecar: false,
                    },
                );
            }
            ObserverCommand::CacheBitcoinBlock(block) => {
                bitcoin_block_store.insert(
                    block.block_identifier.clone(),
                    BitcoinBlockDataCached {
                        block,
                        processed_by_sidecar: false,
                    },
                );
            }
            ObserverCommand::PropagateBitcoinChainEvent(blockchain_event) => {
                ctx.try_log(|logger| {
                    slog::info!(logger, "Handling PropagateBitcoinChainEvent command")
                });
                let mut confirmed_blocks = vec![];
                // Update Chain event before propagation
                // (the second tuple element is the new tip height, currently unused).
                let (chain_event, _) = match blockchain_event {
                    BlockchainEvent::BlockchainUpdatedWithHeaders(data) => {
                        let mut blocks_to_mutate = vec![];
                        let mut new_blocks = vec![];
                        let mut new_tip = 0;
                        for header in data.new_headers.iter() {
                            if header.block_identifier.index > new_tip {
                                new_tip = header.block_identifier.index;
                            }
                            // With a sidecar mutator, blocks are removed from
                            // the store, mutated, then re-inserted below;
                            // otherwise they are only cloned out.
                            if store_update_required {
                                let Some(block) =
                                    bitcoin_block_store.remove(&header.block_identifier)
                                else {
                                    continue;
                                };
                                blocks_to_mutate.push(block);
                            } else {
                                let Some(cache) = bitcoin_block_store.get(&header.block_identifier)
                                else {
                                    continue;
                                };
                                new_blocks.push(cache.block.clone());
                            };
                        }
                        if let Some(ref sidecar) = observer_sidecar {
                            let updated_blocks = sidecar.perform_bitcoin_sidecar_mutations(
                                blocks_to_mutate,
                                vec![],
                                &ctx,
                            );
                            for cache in updated_blocks.into_iter() {
                                bitcoin_block_store
                                    .insert(cache.block.block_identifier.clone(), cache.clone());
                                new_blocks.push(cache.block);
                            }
                        }
                        // Confirmed blocks are evicted from the cache for good.
                        for header in data.confirmed_headers.iter() {
                            match bitcoin_block_store.remove(&header.block_identifier) {
                                Some(res) => {
                                    confirmed_blocks.push(res.block);
                                }
                                None => {
                                    ctx.try_log(|logger| {
                                        slog::error!(
                                            logger,
                                            "Unable to retrieve confirmed bitcoin block {}",
                                            header.block_identifier
                                        )
                                    });
                                }
                            }
                        }
                        (
                            BitcoinChainEvent::ChainUpdatedWithBlocks(
                                BitcoinChainUpdatedWithBlocksData {
                                    new_blocks,
                                    confirmed_blocks: confirmed_blocks.clone(),
                                },
                            ),
                            new_tip,
                        )
                    }
                    BlockchainEvent::BlockchainUpdatedWithReorg(data) => {
                        let mut blocks_to_rollback = vec![];
                        let mut blocks_to_mutate = vec![];
                        let mut blocks_to_apply = vec![];
                        let mut new_tip = 0;
                        for header in data.headers_to_apply.iter() {
                            if header.block_identifier.index > new_tip {
                                new_tip = header.block_identifier.index;
                            }
                            if store_update_required {
                                let Some(block) =
                                    bitcoin_block_store.remove(&header.block_identifier)
                                else {
                                    continue;
                                };
                                blocks_to_mutate.push(block);
                            } else {
                                let Some(cache) = bitcoin_block_store.get(&header.block_identifier)
                                else {
                                    continue;
                                };
                                blocks_to_apply.push(cache.block.clone());
                            };
                        }
                        // Rolled-back blocks stay cached (only cloned), since
                        // they may be re-applied by a later reorg.
                        let mut blocks_ids_to_rollback: Vec<BlockIdentifier> = vec![];
                        for header in data.headers_to_rollback.iter() {
                            match bitcoin_block_store.get(&header.block_identifier) {
                                Some(cache) => {
                                    blocks_ids_to_rollback.push(header.block_identifier.clone());
                                    blocks_to_rollback.push(cache.block.clone());
                                }
                                None => {
                                    ctx.try_log(|logger| {
                                        slog::error!(
                                            logger,
                                            "Unable to retrieve bitcoin block {}",
                                            header.block_identifier
                                        )
                                    });
                                }
                            }
                        }
                        if let Some(ref sidecar) = observer_sidecar {
                            let updated_blocks = sidecar.perform_bitcoin_sidecar_mutations(
                                blocks_to_mutate,
                                blocks_ids_to_rollback,
                                &ctx,
                            );
                            for cache in updated_blocks.into_iter() {
                                bitcoin_block_store
                                    .insert(cache.block.block_identifier.clone(), cache.clone());
                                blocks_to_apply.push(cache.block);
                            }
                        }
                        for header in data.confirmed_headers.iter() {
                            match bitcoin_block_store.remove(&header.block_identifier) {
                                Some(res) => {
                                    confirmed_blocks.push(res.block);
                                }
                                None => {
                                    ctx.try_log(|logger| {
                                        slog::error!(
                                            logger,
                                            "Unable to retrieve confirmed bitcoin block {}",
                                            header.block_identifier
                                        )
                                    });
                                }
                            }
                        }
                        (
                            BitcoinChainEvent::ChainUpdatedWithReorg(
                                BitcoinChainUpdatedWithReorgData {
                                    blocks_to_apply,
                                    blocks_to_rollback,
                                    confirmed_blocks: confirmed_blocks.clone(),
                                },
                            ),
                            new_tip,
                        )
                    }
                };
                if let Some(ref sidecar) = observer_sidecar {
                    sidecar.notify_chain_event(&chain_event, &ctx)
                }
            }
        }
    }
    terminate(ingestion_shutdown, observer_events_tx, &ctx);
    Ok(())
}
/// Shuts the observer down: notifies the ingestion endpoint (when running)
/// and informs subscribers that the observer is terminating.
fn terminate(
    ingestion_shutdown: Option<Shutdown>,
    observer_events_tx: Option<crossbeam_channel::Sender<ObserverEvent>>,
    ctx: &Context,
) {
    ctx.try_log(|logger| slog::info!(logger, "Handling Termination command"));
    if let Some(shutdown_handle) = ingestion_shutdown {
        shutdown_handle.notify();
    }
    let Some(tx) = observer_events_tx else {
        return;
    };
    // Send failures are ignored: subscribers may already be gone.
    let _ = tx.send(ObserverEvent::Info("Terminating event observer".into()));
    let _ = tx.send(ObserverEvent::Terminate);
}

View File

@@ -0,0 +1,154 @@
use chainhook_types::BitcoinBlockSignaling;
use hiro_system_kit::slog;
use std::sync::mpsc::Sender;
use zmq::Socket;
use crate::{
indexer::{
bitcoin::{build_http_client, download_and_parse_block_with_retry},
fork_scratch_pad::ForkScratchPad,
},
utils::Context,
};
use std::collections::VecDeque;
use super::{EventObserverConfig, ObserverCommand};
/// Creates a ZMQ SUB socket subscribed to bitcoind's `hashblock`
/// notifications, with TCP keepalive tuned so long-idle connections are
/// not silently dropped by the OS.
///
/// # Panics
/// Panics when socket creation or any option cannot be set (previously
/// `assert!(… .is_ok())`, which discarded the underlying error; `expect`
/// now carries both a message and the zmq error).
fn new_zmq_socket() -> Socket {
    let context = zmq::Context::new();
    let socket = context
        .socket(zmq::SUB)
        .expect("unable to create ZMQ SUB socket");
    socket
        .set_subscribe(b"hashblock")
        .expect("unable to subscribe to hashblock topic");
    socket
        .set_rcvhwm(0)
        .expect("unable to disable receive high-water mark");
    // We override the OS default behavior:
    socket
        .set_tcp_keepalive(1)
        .expect("unable to enable TCP keepalive");
    // The keepalive routine will wait for 5 minutes
    socket
        .set_tcp_keepalive_idle(300)
        .expect("unable to set TCP keepalive idle time");
    // And then resend it every 60 seconds
    socket
        .set_tcp_keepalive_intvl(60)
        .expect("unable to set TCP keepalive interval");
    // 120 times
    socket
        .set_tcp_keepalive_cnt(120)
        .expect("unable to set TCP keepalive count");
    socket
}
/// Listens for `hashblock` announcements from bitcoind over ZeroMQ,
/// downloads each announced block, and forwards it to the observer as
/// [ObserverCommand]s. Reconnects the socket on receive errors and walks
/// back through parent blocks when an announced block cannot be appended
/// (see the re-org note below). Never returns.
pub async fn start_zeromq_runloop(
    config: &EventObserverConfig,
    observer_commands_tx: Sender<ObserverCommand>,
    ctx: &Context,
) {
    // Irrefutable: ZeroMQ is currently the only signaling variant.
    let BitcoinBlockSignaling::ZeroMQ(ref bitcoind_zmq_url) = config.bitcoin_block_signaling;
    let bitcoind_zmq_url = bitcoind_zmq_url.clone();
    let bitcoin_config = config.get_bitcoin_config();
    let http_client = build_http_client();
    ctx.try_log(|logger| {
        slog::info!(
            logger,
            "Waiting for ZMQ connection acknowledgment from bitcoind"
        )
    });
    let mut socket = new_zmq_socket();
    assert!(socket.connect(&bitcoind_zmq_url).is_ok());
    ctx.try_log(|logger| slog::info!(logger, "Waiting for ZMQ messages from bitcoind"));
    // Local header tracker used to decide whether a block extends a known chain.
    let mut bitcoin_blocks_pool = ForkScratchPad::new();
    loop {
        let msg = match socket.recv_multipart(0) {
            Ok(msg) => msg,
            Err(e) => {
                // Recreate and reconnect the socket on any receive error.
                ctx.try_log(|logger| {
                    slog::error!(logger, "Unable to receive ZMQ message: {}", e.to_string())
                });
                socket = new_zmq_socket();
                assert!(socket.connect(&bitcoind_zmq_url).is_ok());
                continue;
            }
        };
        let (topic, data, _sequence) = (&msg[0], &msg[1], &msg[2]);
        if !topic.eq(b"hashblock") {
            ctx.try_log(|logger| slog::error!(logger, "Topic not supported",));
            continue;
        }
        let block_hash = hex::encode(data);
        ctx.try_log(|logger| slog::info!(logger, "Bitcoin block hash announced #{block_hash}",));
        // Work queue of hashes to fetch; parents get pushed in front when a
        // gap is detected.
        let mut block_hashes: VecDeque<String> = VecDeque::new();
        block_hashes.push_front(block_hash);
        while let Some(block_hash) = block_hashes.pop_front() {
            let block = match download_and_parse_block_with_retry(
                &http_client,
                &block_hash,
                &bitcoin_config,
                ctx,
            )
            .await
            {
                Ok(block) => block,
                Err(e) => {
                    ctx.try_log(|logger| {
                        slog::warn!(
                            logger,
                            "unable to download_and_parse_block: {}",
                            e.to_string()
                        )
                    });
                    continue;
                }
            };
            let header = block.get_block_header();
            ctx.try_log(|logger| {
                slog::info!(
                    logger,
                    "Bitcoin block #{} dispatched for processing",
                    block.height
                )
            });
            let _ = observer_commands_tx.send(ObserverCommand::ProcessBitcoinBlock(block));
            if bitcoin_blocks_pool.can_process_header(&header) {
                match bitcoin_blocks_pool.process_header(header, ctx) {
                    Ok(Some(event)) => {
                        let _ = observer_commands_tx
                            .send(ObserverCommand::PropagateBitcoinChainEvent(event));
                    }
                    Err(e) => {
                        ctx.try_log(|logger| {
                            slog::warn!(logger, "Unable to append block: {:?}", e)
                        });
                    }
                    Ok(None) => {
                        ctx.try_log(|logger| slog::warn!(logger, "Unable to append block"));
                    }
                }
            } else {
                // Handle a behaviour specific to ZMQ usage in bitcoind.
                // Considering a simple re-org:
                // A (1) - B1 (2) - C1 (3)
                //       \ B2 (4) - C2 (5) - D2 (6)
                // When D2 is being discovered (making A -> B2 -> C2 -> D2 the new canonical fork)
                // it looks like ZMQ is only publishing D2.
                // Without additional operation, we end up with a block that we can't append.
                let parent_block_hash = header
                    .parent_block_identifier
                    .get_hash_bytes_str()
                    .to_string();
                ctx.try_log(|logger| {
                    slog::info!(
                        logger,
                        "Possible re-org detected, retrieving parent block {parent_block_hash}"
                    )
                });
                // Re-queue this block behind its parent so the parent is
                // fetched and processed first.
                block_hashes.push_front(block_hash);
                block_hashes.push_front(parent_block_hash);
            }
        }
    }
}

View File

@@ -0,0 +1,365 @@
use std::{
collections::{BTreeSet, VecDeque},
fs::{self, OpenOptions},
io::{Read, Write},
path::PathBuf,
};
use chainhook_types::{BitcoinBlockData, BlockHeader, BlockIdentifier};
use hiro_system_kit::slog::{self, Logger};
use reqwest::RequestBuilder;
/// Shared logging/tracing context threaded through most operations.
#[derive(Clone)]
pub struct Context {
    // When `None`, all `try_log` calls are silent no-ops.
    pub logger: Option<Logger>,
    // Enables extra tracing behavior in call sites that check it.
    pub tracer: bool,
}
impl Context {
    /// Returns a `Context` with no logger and tracing disabled.
    pub fn empty() -> Context {
        Context {
            logger: None,
            tracer: false,
        }
    }
    /// Runs `closure` against the logger, if one is configured; silent no-op
    /// otherwise.
    pub fn try_log<F>(&self, closure: F)
    where
        F: FnOnce(&Logger),
    {
        if let Some(ref logger) = self.logger {
            closure(logger)
        }
    }
    /// Returns the configured logger.
    ///
    /// # Panics
    /// Panics with an explicit message when no logger was configured; prefer
    /// `try_log` when a logger is optional.
    pub fn expect_logger(&self) -> &Logger {
        self.logger
            .as_ref()
            .expect("no logger configured in Context")
    }
}
/// Minimal view of a block: its own identifier and its parent's identifier.
///
/// Lets fork-tracking code operate uniformly over full blocks and bare
/// headers.
pub trait AbstractBlock {
    fn get_identifier(&self) -> &BlockIdentifier;
    fn get_parent_identifier(&self) -> &BlockIdentifier;
    /// Builds a standalone `BlockHeader` by cloning both identifiers.
    fn get_header(&self) -> BlockHeader {
        BlockHeader {
            block_identifier: self.get_identifier().clone(),
            parent_block_identifier: self.get_parent_identifier().clone(),
        }
    }
}
impl AbstractBlock for BlockHeader {
    fn get_identifier(&self) -> &BlockIdentifier {
        &self.block_identifier
    }
    fn get_parent_identifier(&self) -> &BlockIdentifier {
        &self.parent_block_identifier
    }
}
impl AbstractBlock for BitcoinBlockData {
    fn get_identifier(&self) -> &BlockIdentifier {
        &self.block_identifier
    }
    fn get_parent_identifier(&self) -> &BlockIdentifier {
        &self.parent_block_identifier
    }
}
/// Sends `request_builder`, retrying up to `attempts_max` times with
/// `attempts_interval_sec` seconds between attempts.
///
/// Returns `Ok(())` on the first 2xx response; otherwise returns the most
/// recent error message once the retry budget is exhausted.
///
/// NOTE(review): despite being an `async fn`, this waits between attempts
/// with `std::thread::sleep`, which blocks the executor thread — confirm
/// this is acceptable on the runtime it runs on (an async timer such as
/// `tokio::time::sleep` would avoid stalling sibling tasks).
pub async fn send_request(
    request_builder: RequestBuilder,
    attempts_max: u16,
    attempts_interval_sec: u16,
    ctx: &Context,
) -> Result<(), String> {
    let mut retry = 0;
    loop {
        // `send()` consumes the builder, so each attempt works on a clone;
        // `try_clone` fails for non-cloneable (streaming) bodies.
        let request_builder = match request_builder.try_clone() {
            Some(rb) => rb,
            None => {
                ctx.try_log(|logger| slog::warn!(logger, "unable to clone request builder"));
                return Err("internal server error: unable to clone request builder".to_string());
            }
        };
        let err_msg = match request_builder.send().await {
            Ok(res) => {
                if res.status().is_success() {
                    ctx.try_log(|logger| slog::debug!(logger, "Trigger {} successful", res.url()));
                    return Ok(());
                } else {
                    // Non-2xx responses also count against the retry budget.
                    retry += 1;
                    let err_msg =
                        format!("Trigger {} failed with status {}", res.url(), res.status());
                    ctx.try_log(|logger| slog::warn!(logger, "{}", err_msg));
                    err_msg
                }
            }
            Err(e) => {
                retry += 1;
                let err_msg = format!("unable to send request {}", e);
                ctx.try_log(|logger| slog::warn!(logger, "{}", err_msg));
                err_msg
            }
        };
        if retry >= attempts_max {
            let msg: String = format!(
                "unable to send request after several retries. most recent error: {}",
                err_msg
            );
            ctx.try_log(|logger| slog::warn!(logger, "{}", msg));
            return Err(msg);
        }
        std::thread::sleep(std::time::Duration::from_secs(attempts_interval_sec.into()));
    }
}
/// Appends `bytes` (which must be valid UTF-8) plus a trailing newline to the
/// file at `path`, resolved relative to the current working directory. The
/// file is created if it does not exist.
///
/// # Errors
/// Returns a human-readable message when the working directory cannot be
/// determined, the file cannot be opened, the bytes are not UTF-8, or the
/// write fails. Every failure is also logged through `ctx`.
pub fn file_append(path: String, bytes: Vec<u8>, ctx: &Context) -> Result<(), String> {
    let mut file_path = match std::env::current_dir() {
        Err(e) => {
            let msg = format!("unable to retrieve current_dir {}", e);
            ctx.try_log(|logger| slog::warn!(logger, "{}", msg));
            return Err(msg);
        }
        Ok(p) => p,
    };
    file_path.push(path);
    // `create(true)` makes creation part of the open itself. The previous
    // exists()/File::create()/re-open sequence was racy (TOCTOU) between the
    // existence check and the open.
    let mut file = match OpenOptions::new()
        .create(true)
        .append(true)
        .open(&file_path)
    {
        Err(e) => {
            let msg = format!("unable to open file {}", e);
            ctx.try_log(|logger| slog::warn!(logger, "{}", msg));
            return Err(msg);
        }
        Ok(p) => p,
    };
    let utf8 = match String::from_utf8(bytes) {
        Ok(string) => string,
        Err(e) => {
            let msg = format!("unable to serialize bytes as utf8 string {}", e);
            ctx.try_log(|logger| slog::warn!(logger, "{}", msg));
            return Err(msg);
        }
    };
    if let Err(e) = writeln!(file, "{}", utf8) {
        // Error message fixed: this is a write failure, not an open failure.
        let msg = format!("unable to write to file {}", e);
        ctx.try_log(|logger| slog::warn!(logger, "{}", msg));
        eprintln!("Couldn't write to file: {}", e);
        return Err(msg);
    }
    Ok(())
}
/// Reasons a `BlockHeights` specification cannot be materialized.
#[derive(Debug)]
pub enum BlockHeightsError {
    /// The requested set exceeds the allowed maximum: `(max, requested)`.
    ExceedsMaxEntries(u64, u64),
    /// A `BlockRange(start, end)` was given with `start > end`.
    StartLargerThanEnd,
}
/// A set of block heights to process: either an inclusive range or an
/// explicit (possibly unsorted, possibly duplicated) list.
pub enum BlockHeights {
    /// Inclusive range `start..=end`.
    BlockRange(u64, u64),
    /// Explicit list of heights.
    Blocks(Vec<u64>),
}
/// Upper bound on the number of heights a single specification may expand to.
pub const MAX_BLOCK_HEIGHTS_ENTRIES: u64 = 1_000_000;
impl BlockHeights {
    /// Expands the specification into a deduplicated, ascending queue of
    /// heights.
    ///
    /// # Errors
    /// - `StartLargerThanEnd` when a range is inverted.
    /// - `ExceedsMaxEntries` when the span/list is larger than
    ///   `MAX_BLOCK_HEIGHTS_ENTRIES`.
    pub fn get_sorted_entries(&self) -> Result<VecDeque<u64>, BlockHeightsError> {
        let mut entries = VecDeque::new();
        match self {
            BlockHeights::BlockRange(start, end) => {
                if start > end {
                    return Err(BlockHeightsError::StartLargerThanEnd);
                }
                // NOTE: the limit applies to `end - start`, so an inclusive
                // range may contain MAX_BLOCK_HEIGHTS_ENTRIES + 1 heights;
                // kept as-is for backward compatibility.
                if (end - start) > MAX_BLOCK_HEIGHTS_ENTRIES {
                    return Err(BlockHeightsError::ExceedsMaxEntries(
                        MAX_BLOCK_HEIGHTS_ENTRIES,
                        end - start,
                    ));
                }
                entries.extend(*start..=*end);
            }
            BlockHeights::Blocks(heights) => {
                if heights.len() as u64 > MAX_BLOCK_HEIGHTS_ENTRIES {
                    return Err(BlockHeightsError::ExceedsMaxEntries(
                        MAX_BLOCK_HEIGHTS_ENTRIES,
                        heights.len() as u64,
                    ));
                }
                // A BTreeSet both deduplicates and sorts, so the previous
                // clone + sort + insert sequence did the ordering work twice.
                let unique: BTreeSet<u64> = heights.iter().copied().collect();
                entries.extend(unique);
            }
        }
        Ok(entries)
    }
}
// An inclusive range 0..=10 must yield all 11 heights in ascending order.
#[test]
fn test_block_heights_range_construct() {
    let range = BlockHeights::BlockRange(0, 10);
    let mut entries = range.get_sorted_entries().unwrap();
    let mut cursor = 0;
    while let Some(entry) = entries.pop_front() {
        assert_eq!(entry, cursor);
        cursor += 1;
    }
    assert_eq!(11, cursor);
}
// Spans wider than MAX_BLOCK_HEIGHTS_ENTRIES must be rejected with
// ExceedsMaxEntries.
#[test]
fn test_block_heights_range_limits_entries() {
    let range = BlockHeights::BlockRange(0, MAX_BLOCK_HEIGHTS_ENTRIES + 1);
    match range.get_sorted_entries() {
        Ok(_) => panic!("Expected block heights range to error when exceeding max entries"),
        Err(e) => match e {
            BlockHeightsError::ExceedsMaxEntries(_, _) => {}
            BlockHeightsError::StartLargerThanEnd => {
                panic!("Wrong error reported from exceeding block heights range max entries")
            }
        },
    };
}
// An inverted range (start > end) must be rejected with StartLargerThanEnd.
#[test]
fn test_block_heights_range_enforces_order() {
    let range = BlockHeights::BlockRange(1, 0);
    match range.get_sorted_entries() {
        Ok(_) => panic!("Expected block heights range to error when exceeding max entries"),
        Err(e) => match e {
            BlockHeightsError::ExceedsMaxEntries(_, _) => {
                panic!("Wrong error reported from supplying start/end out of order in block heights range")
            }
            BlockHeightsError::StartLargerThanEnd => {}
        },
    };
}
// Explicit lists must come back sorted and deduplicated.
#[test]
fn test_block_heights_blocks_construct() {
    let range = BlockHeights::Blocks(vec![0, 3, 5, 6, 6, 10, 9]);
    let expected = vec![0, 3, 5, 6, 9, 10];
    let entries = range.get_sorted_entries().unwrap();
    for (entry, expectation) in entries.iter().zip(expected) {
        assert_eq!(*entry, expectation);
    }
}
// Lists longer than MAX_BLOCK_HEIGHTS_ENTRIES must be rejected.
#[test]
fn test_block_heights_blocks_limits_entries() {
    let mut too_big = vec![];
    for i in 0..MAX_BLOCK_HEIGHTS_ENTRIES + 1 {
        too_big.push(i);
    }
    let range = BlockHeights::Blocks(too_big);
    match range.get_sorted_entries() {
        Ok(_) => panic!("Expected block heights blocks to error when exceeding max entries"),
        Err(e) => match e {
            BlockHeightsError::ExceedsMaxEntries(_, _) => {}
            BlockHeightsError::StartLargerThanEnd => {
                panic!("Wrong error reported from exceeding block heights blocks max entries")
            }
        },
    };
}
/// Reads the entire contents of the file at `file_path`.
///
/// # Errors
/// Returns a human-readable message when the file cannot be opened or read.
pub fn read_file_content_at_path(file_path: &PathBuf) -> Result<Vec<u8>, String> {
    // `fs::read` opens, sizes the buffer from metadata, and reads in one
    // call — replacing the manual File + BufReader + read_to_end sequence
    // (which also cloned the path needlessly).
    fs::read(file_path)
        .map_err(|e| format!("unable to read file {}\n{:?}", file_path.display(), e))
}
/// Writes `content` to `file_path`, creating any missing parent directories
/// and truncating the file if it already exists.
///
/// # Errors
/// Returns a human-readable message when directory creation or the write
/// fails.
pub fn write_file_content_at_path(file_path: &PathBuf, content: &[u8]) -> Result<(), String> {
    // `Path::parent` replaces the clone + pop sequence; `None` only occurs
    // for root-like paths, which need no directory creation.
    if let Some(parent_directory) = file_path.parent() {
        fs::create_dir_all(parent_directory).map_err(|e| {
            format!(
                "unable to create parent directory {}\n{}",
                parent_directory.display(),
                e
            )
        })?;
    }
    // `fs::write` collapses File::create + write_all into a single call.
    fs::write(file_path, content)
        .map_err(|e| format!("unable to write file {}\n{}", file_path.display(), e))?;
    Ok(())
}
// TODO: Fold these macros into one generic macro with configurable log levels.
/// Logs at `info` level through any value exposing `try_log` (e.g. `Context`).
/// The single-arg form logs a bare tag; the variadic form forwards `slog`
/// formatting arguments.
#[macro_export]
macro_rules! try_info {
    ($a:expr, $tag:expr, $($args:tt)*) => {
        $a.try_log(|l| slog::info!(l, $tag, $($args)*));
    };
    ($a:expr, $tag:expr) => {
        $a.try_log(|l| slog::info!(l, $tag));
    };
}
/// Logs at `debug` level; see [`try_info`] for the argument forms.
#[macro_export]
macro_rules! try_debug {
    ($a:expr, $tag:expr, $($args:tt)*) => {
        $a.try_log(|l| slog::debug!(l, $tag, $($args)*));
    };
    ($a:expr, $tag:expr) => {
        $a.try_log(|l| slog::debug!(l, $tag));
    };
}
/// Logs at `warn` level; see [`try_info`] for the argument forms.
#[macro_export]
macro_rules! try_warn {
    ($a:expr, $tag:expr, $($args:tt)*) => {
        $a.try_log(|l| slog::warn!(l, $tag, $($args)*));
    };
    ($a:expr, $tag:expr) => {
        $a.try_log(|l| slog::warn!(l, $tag));
    };
}
/// Logs at `error` level; see [`try_info`] for the argument forms.
#[macro_export]
macro_rules! try_error {
    ($a:expr, $tag:expr, $($args:tt)*) => {
        $a.try_log(|l| slog::error!(l, $tag, $($args)*));
    };
    ($a:expr, $tag:expr) => {
        $a.try_log(|l| slog::error!(l, $tag));
    };
}

View File

@@ -0,0 +1,17 @@
[package]
name = "chainhook-types"
description = "Bitcoin and Stacks data schemas, based on the Rosetta specification"
license = "MIT"
version = "1.3.8"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Version is pinned at the workspace root so all components agree.
bitcoin = { workspace = true }
serde = "1"
serde_json = "1"
serde_derive = "1"
strum = { version = "0.23.0", features = ["derive"] }
# Hiro fork of schemars carrying chainhook-specific fixes.
schemars = { version = "0.8.16", git = "https://github.com/hirosystems/schemars.git", branch = "feat-chainhook-fixes" }
hex = "0.4.3"

View File

@@ -0,0 +1,82 @@
use crate::TransactionIdentifier;
/// A transaction input, which defines old coins to be consumed
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Serialize, Deserialize)]
pub struct TxIn {
    /// The reference to the previous output that is being used as an input.
    pub previous_output: OutPoint,
    /// The script which pushes values on the stack which will cause
    /// the referenced output's script to be accepted.
    pub script_sig: String,
    /// The sequence number, which suggests to miners which of two
    /// conflicting transactions should be preferred, or 0xFFFFFFFF
    /// to ignore this feature. This is generally never used since
    /// the miner behaviour cannot be enforced.
    pub sequence: u32,
    /// Witness data: an array of byte-arrays.
    /// Note that this field is *not* (de)serialized with the rest of the TxIn in
    /// Encodable/Decodable, as it is (de)serialized at the end of the full
    /// Transaction. It *is* (de)serialized with the rest of the TxIn in other
    /// (de)serialization routines.
    pub witness: Vec<String>,
}
/// A transaction output, which defines new coins to be created from old ones.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Serialize, Deserialize)]
pub struct TxOut {
    /// The value of the output, in satoshis.
    pub value: u64,
    /// The script which must be satisfied for the output to be spent.
    pub script_pubkey: String,
}
/// A reference to a transaction output.
#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct OutPoint {
    /// The referenced transaction's txid.
    pub txid: TransactionIdentifier,
    /// The index of the referenced output in its transaction's vout.
    pub vout: u32,
    /// The value of the referenced output, in satoshis.
    pub value: u64,
    /// The height of the block containing the referenced output.
    pub block_height: u64,
}
impl TxOut {
    /// Decodes the hex script pubkey into raw bytes.
    ///
    /// # Panics
    /// Panics when the script is not valid hex — per the message, this is
    /// expected for coinbase transactions, which carry no script here.
    pub fn get_script_pubkey_bytes(&self) -> Vec<u8> {
        hex::decode(&self.get_script_pubkey_hex()).expect("not provided for coinbase txs")
    }
    /// Returns the script pubkey with its first two characters stripped.
    // NOTE(review): assumes `script_pubkey` always carries a "0x" prefix;
    // slicing panics on strings shorter than 2 chars — confirm producers
    // always set the prefix.
    pub fn get_script_pubkey_hex(&self) -> &str {
        &self.script_pubkey[2..]
    }
}
/// The Witness is the data used to unlock bitcoins since the [segwit upgrade](https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki)
///
/// Can be logically seen as an array of byte-arrays `Vec<Vec<u8>>` and indeed you can convert from
/// it [`Witness::from_vec`] and convert into it [`Witness::to_vec`].
///
/// For serialization and deserialization performance it is stored internally as a single `Vec`,
/// saving some allocations.
///
// NOTE(review): this mirrors the layout of rust-bitcoin's `Witness`; the
// `from_vec`/`to_vec` methods referenced above are not defined in this file —
// confirm the doc links resolve or drop them.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Serialize, Deserialize)]
pub struct Witness {
    /// contains the witness `Vec<Vec<u8>>` serialization without the initial varint indicating the
    /// number of elements (which is stored in `witness_elements`)
    content: Vec<u8>,
    /// Number of elements in the witness.
    /// It is stored separately (instead of as VarInt in the initial part of content) so that method
    /// like [`Witness::push`] doesn't have case requiring to shift the entire array
    witness_elements: usize,
    /// If `witness_elements > 0` it's a valid index pointing to the last witness element in `content`
    /// (Including the varint specifying the length of the element)
    last: usize,
    /// If `witness_elements > 1` it's a valid index pointing to the second-to-last witness element in `content`
    /// (Including the varint specifying the length of the element)
    second_to_last: usize,
}

View File

@@ -0,0 +1,18 @@
extern crate serde;
#[macro_use]
extern crate serde_derive;
pub mod bitcoin;
mod ordinals;
mod processors;
mod rosetta;
pub use ordinals::*;
pub use processors::*;
pub use rosetta::*;
/// Blockchain a consumer targets; only Bitcoin is modeled in this crate.
#[derive(Clone, Debug)]
pub enum Chain {
    Bitcoin,
}

View File

@@ -0,0 +1,163 @@
use serde_json::Value;
/// An ordinals-protocol event attributed to a Bitcoin transaction.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum OrdinalOperation {
    /// A new inscription was revealed.
    InscriptionRevealed(OrdinalInscriptionRevealData),
    /// An existing inscription's satoshi moved to a new satpoint.
    InscriptionTransferred(OrdinalInscriptionTransferData),
}
/// Details of an inscription transfer.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct OrdinalInscriptionTransferData {
    /// Ordinal number of the inscribed satoshi.
    pub ordinal_number: u64,
    /// Where the satoshi ended up (address, fees, or burn).
    pub destination: OrdinalInscriptionTransferDestination,
    pub satpoint_pre_transfer: String,
    pub satpoint_post_transfer: String,
    /// Value of the output now holding the satoshi; `None` when it was
    /// spent as fees.
    pub post_transfer_output_value: Option<u64>,
    /// Index of the transaction within its block.
    pub tx_index: usize,
}
/// Terminal location of a transferred inscription.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(tag = "type", content = "value", rename_all = "snake_case")]
pub enum OrdinalInscriptionTransferDestination {
    /// Sent to the contained address/script.
    Transferred(String),
    /// The satoshi was consumed as transaction fees.
    SpentInFees,
    /// Sent to a provably unspendable script (contained here).
    Burnt(String),
}
/// Reason an inscription is considered cursed.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub enum OrdinalInscriptionCurseType {
    DuplicateField,
    IncompleteField,
    NotAtOffsetZero,
    NotInFirstInput,
    Pointer,
    Pushnum,
    Reinscription,
    Stutter,
    UnrecognizedEvenField,
    Generic,
}
/// Boolean "charm" flags attached to an inscription (rarity, curse status,
/// burn status, etc.).
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct OrdinalInscriptionCharms {
    pub coin: bool,
    pub cursed: bool,
    pub epic: bool,
    pub legendary: bool,
    pub lost: bool,
    pub nineball: bool,
    pub rare: bool,
    pub reinscription: bool,
    pub unbound: bool,
    pub uncommon: bool,
    pub vindicated: bool,
    pub mythic: bool,
    pub burned: bool,
    pub palindrome: bool,
}
impl OrdinalInscriptionCharms {
pub fn none() -> Self {
OrdinalInscriptionCharms {
coin: false,
cursed: false,
epic: false,
legendary: false,
lost: false,
nineball: false,
rare: false,
reinscription: false,
unbound: false,
uncommon: false,
vindicated: false,
mythic: false,
burned: false,
palindrome: false,
}
}
}
/// Full details of a newly revealed inscription.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct OrdinalInscriptionRevealData {
    // NOTE(review): encoding of the body is not evident from this file
    // (presumably hex) — confirm against the producer.
    pub content_bytes: String,
    pub content_type: String,
    pub content_length: usize,
    /// Both classic and jubilee sequence numbers.
    pub inscription_number: OrdinalInscriptionNumber,
    pub inscription_fee: u64,
    pub inscription_output_value: u64,
    pub inscription_id: String,
    /// Index of the input that carried the inscription envelope.
    pub inscription_input_index: usize,
    pub inscription_pointer: Option<u64>,
    pub inscriber_address: Option<String>,
    /// Inscription id this one delegates its content to, if any.
    pub delegate: Option<String>,
    pub metaprotocol: Option<String>,
    pub metadata: Option<Value>,
    /// Inscription ids declared as parents.
    pub parents: Vec<String>,
    /// Ordinal number of the inscribed satoshi.
    pub ordinal_number: u64,
    pub ordinal_block_height: u64,
    pub ordinal_offset: u64,
    /// Index of the transaction within its block.
    pub tx_index: usize,
    pub transfers_pre_inscription: u32,
    pub satpoint_post_inscription: String,
    /// `Some` when the inscription is cursed, with the reason.
    pub curse_type: Option<OrdinalInscriptionCurseType>,
    pub charms: OrdinalInscriptionCharms,
}
impl OrdinalInscriptionNumber {
    /// Returns a number with both sequences set to zero.
    pub fn zero() -> Self {
        OrdinalInscriptionNumber {
            jubilee: 0,
            classic: 0,
        }
    }
}
impl OrdinalInscriptionRevealData {
    /// Returns the jubilee variant of the inscription number.
    pub fn get_inscription_number(&self) -> i64 {
        self.inscription_number.jubilee
    }
}
/// An inscription's sequence number under both numbering schemes.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct OrdinalInscriptionNumber {
    pub classic: i64,
    pub jubilee: i64,
}
/// A BRC-20 token deployment decoded from an inscription.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct Brc20TokenDeployData {
    /// Token ticker.
    pub tick: String,
    /// Maximum supply (decimal string).
    pub max: String,
    /// Per-mint limit (decimal string).
    pub lim: String,
    /// Number of decimals (decimal string).
    pub dec: String,
    pub address: String,
    pub inscription_id: String,
    pub self_mint: bool,
}
/// A BRC-20 mint or transfer-inscribe event affecting a single balance.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct Brc20BalanceData {
    pub tick: String,
    /// Amount as a decimal string.
    pub amt: String,
    pub address: String,
    pub inscription_id: String,
}
/// A BRC-20 transfer-send event moving a balance between addresses.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct Brc20TransferData {
    pub tick: String,
    /// Amount as a decimal string.
    pub amt: String,
    pub sender_address: String,
    pub receiver_address: String,
    pub inscription_id: String,
}
/// Any BRC-20 operation decoded from a transaction.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum Brc20Operation {
    Deploy(Brc20TokenDeployData),
    Mint(Brc20BalanceData),
    Transfer(Brc20BalanceData),
    TransferSend(Brc20TransferData),
}

View File

@@ -0,0 +1,40 @@
use std::collections::BTreeMap;
use super::{BitcoinBlockData, BitcoinTransactionData};
use serde_json::Value as JsonValue;
/// A Bitcoin transaction paired with processor-specific metadata.
pub struct ProcessedBitcoinTransaction {
    pub tx: BitcoinTransactionData,
    pub metadata: BTreeMap<String, JsonValue>,
}
/// A Bitcoin block paired with processor-specific metadata.
pub struct ProcessedBitcoinBlock {
    // NOTE(review): holds a whole block but is named `tx` — presumably a
    // copy-paste from the struct above; renaming would break callers, so it
    // is only flagged here.
    pub tx: BitcoinBlockData,
    pub metadata: BTreeMap<String, JsonValue>,
}
/// Whether blocks are being replayed from history or received live.
pub enum ProcessingContext {
    Scanning,
    Streaming,
}
/// Hooks a protocol implementation exposes to the block-processing pipeline.
pub trait BitcoinProtocolProcessor {
    /// One-time setup called before any blocks are processed.
    fn register(&mut self);
    fn process_block(
        &mut self,
        block: &mut ProcessedBitcoinBlock,
        processing_context: ProcessingContext,
    );
    fn process_transaction(
        &mut self,
        transaction: &mut ProcessedBitcoinTransaction,
        processing_context: ProcessingContext,
    );
}
/// Entry point for running a [`BitcoinProtocolProcessor`]: currently this
/// only invokes the processor's registration hook.
pub fn run_processor<P: BitcoinProtocolProcessor>(mut p: P) {
    p.register();
}

View File

@@ -0,0 +1,462 @@
use crate::bitcoin::{TxIn, TxOut};
use crate::ordinals::OrdinalOperation;
use crate::Brc20Operation;
use schemars::JsonSchema;
use std::cmp::Ordering;
use std::fmt::Display;
use std::hash::{Hash, Hasher};
/// BlockIdentifier uniquely identifies a block in a particular network.
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct BlockIdentifier {
    /// Also known as the block height.
    pub index: u64,
    /// Block hash; the accessors below strip the first two characters, so it
    /// is expected to carry a "0x" prefix.
    pub hash: String,
}
impl BlockIdentifier {
    /// Returns the hash with its first two characters (the "0x" prefix)
    /// stripped.
    // NOTE(review): panics if `hash` is shorter than 2 chars — assumes the
    // prefix is always present; confirm at construction sites.
    pub fn get_hash_bytes_str(&self) -> &str {
        &self.hash[2..]
    }
    /// Decodes the prefix-stripped hash as raw bytes.
    ///
    /// # Panics
    /// Panics if the remainder is not valid hex.
    pub fn get_hash_bytes(&self) -> Vec<u8> {
        hex::decode(&self.get_hash_bytes_str()).unwrap()
    }
}
impl Display for BlockIdentifier {
    // Renders as `Block #<height> (0x1234...abcd)`.
    // NOTE(review): indexing [0..6] and [62..] assumes a 66-char
    // ("0x" + 64 hex) hash; shorter hashes panic — confirm the invariant.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            f,
            "Block #{} ({}...{})",
            self.index,
            &self.hash.as_str()[0..6],
            &self.hash.as_str()[62..]
        )
    }
}
impl Hash for BlockIdentifier {
    // Hashes only the block hash, mirroring the `PartialEq` impl (which also
    // ignores `index`), so the Hash/Eq contract holds.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.hash.hash(state);
    }
}
impl Ord for BlockIdentifier {
    // Intentionally reversed comparison: ordering `other` against `self`
    // sorts identifiers in descending (index, hash) order — TODO confirm
    // callers rely on this; a derived Ord would sort ascending.
    fn cmp(&self, other: &Self) -> Ordering {
        (other.index, &other.hash).cmp(&(self.index, &self.hash))
    }
}
impl PartialOrd for BlockIdentifier {
    /// Delegates to [`Ord::cmp`] so `partial_cmp` and `cmp` agree.
    ///
    /// The previous implementation returned `Some(other.cmp(self))`, which
    /// reversed the (already reversed) `Ord` ordering and therefore violated
    /// the contract that `partial_cmp(a, b) == Some(cmp(a, b))` — comparison
    /// operators and sorting would disagree about direction.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for BlockIdentifier {
    // Equality considers only the hash; `index` is ignored, consistent with
    // the `Hash` impl.
    fn eq(&self, other: &Self) -> bool {
        self.hash == other.hash
    }
}
impl Eq for BlockIdentifier {}
/// BitcoinBlock contain an array of Transactions that occurred at a particular
/// BlockIdentifier. A hard requirement for blocks returned by Rosetta
/// implementations is that they MUST be _inalterable_: once a client has
/// requested and received a block identified by a specific BlockIndentifier,
/// all future calls for that same BlockIdentifier must return the same block
/// contents.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct BitcoinBlockData {
    pub block_identifier: BlockIdentifier,
    pub parent_block_identifier: BlockIdentifier,
    /// The timestamp of the block in milliseconds since the Unix Epoch. The
    /// timestamp is stored in milliseconds because some blockchains produce
    /// blocks more often than once a second.
    // NOTE(review): the field is `u32`, which cannot represent milliseconds
    // since the Unix epoch; this is presumably the Bitcoin header time in
    // seconds — confirm and reconcile with the doc above.
    pub timestamp: u32,
    pub transactions: Vec<BitcoinTransactionData>,
    pub metadata: BitcoinBlockMetadata,
}
/// Network-level context attached to a block.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct BitcoinBlockMetadata {
    pub network: BitcoinNetwork,
}
/// The timestamp of the block in milliseconds since the Unix Epoch. The
/// timestamp is stored in milliseconds because some blockchains produce blocks
/// more often than once a second.
#[derive(Debug, Clone, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Timestamp(i64);
/// Transactions contain an array of Operations that are attributable to the
/// same TransactionIdentifier.
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct BitcoinTransactionData {
    pub transaction_identifier: TransactionIdentifier,
    pub operations: Vec<Operation>,
    /// Transactions that are related to other transactions should include the
    /// transaction_identifier of these transactions in the metadata.
    pub metadata: BitcoinTransactionMetadata,
}
/// Extra data for Transaction
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct BitcoinTransactionMetadata {
    pub inputs: Vec<TxIn>,
    pub outputs: Vec<TxOut>,
    /// Ordinals-protocol events detected in this transaction.
    pub ordinal_operations: Vec<OrdinalOperation>,
    /// BRC-20 operation, when one was decoded from an inscription.
    pub brc20_operation: Option<Brc20Operation>,
    pub proof: Option<String>,
    pub fee: u64,
    /// Position of the transaction within its block.
    pub index: u32,
}
/// The transaction_identifier uniquely identifies a transaction in a particular
/// network and block or in the mempool.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize, Hash, PartialOrd, Ord)]
pub struct TransactionIdentifier {
    /// Any transactions that are attributable only to a block (ex: a block
    /// event) should use the hash of the block as the identifier.
    /// Stored lowercased with a "0x" prefix (see `TransactionIdentifier::new`).
    pub hash: String,
}
impl TransactionIdentifier {
    /// Builds an identifier from `txid`, normalizing to lowercase with a
    /// "0x" prefix.
    pub fn new(txid: &str) -> Self {
        let lowercased_txid = txid.to_lowercase();
        // `if`/`else` instead of matching on a bool (clippy::match_bool).
        let hash = if lowercased_txid.starts_with("0x") {
            lowercased_txid
        } else {
            format!("0x{}", lowercased_txid)
        };
        Self { hash }
    }
    /// Returns the hash with its "0x" prefix stripped.
    ///
    /// # Panics
    /// Panics if the stored hash is shorter than 2 characters.
    pub fn get_hash_bytes_str(&self) -> &str {
        &self.hash[2..]
    }
    /// Decodes the prefix-stripped hash to raw bytes.
    ///
    /// # Panics
    /// Panics if the remainder is not valid hex.
    pub fn get_hash_bytes(&self) -> Vec<u8> {
        hex::decode(self.get_hash_bytes_str()).unwrap()
    }
    /// Returns the first 8 bytes of the decoded hash.
    ///
    /// # Panics
    /// Panics if the decoded hash is shorter than 8 bytes.
    pub fn get_8_hash_bytes(&self) -> [u8; 8] {
        let bytes = self.get_hash_bytes();
        let mut prefix = [0u8; 8];
        // Replaces the element-by-element array literal of earlier revisions.
        prefix.copy_from_slice(&bytes[..8]);
        prefix
    }
}
/// Kind of balance change an `Operation` represents.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, strum::EnumIter, strum::IntoStaticStr,
)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum OperationType {
    Credit,
    Debit,
    Lock,
}
/// Optional network-specific data attached to an `Operation`.
#[derive(Debug, Clone, Default, PartialEq, Deserialize, Serialize)]
pub struct OperationMetadata {
    /// Has to be specified for ADD_KEY, REMOVE_KEY, and STAKE operations
    #[serde(skip_serializing_if = "Option::is_none")]
    pub public_key: Option<PublicKey>,
    // TODO(lgalabru): ???
    //#[serde(skip_serializing_if = "Option::is_none")]
    // pub access_key: Option<TODO>,
    /// Has to be specified for DEPLOY_CONTRACT operation
    #[serde(skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    /// Has to be specified for FUNCTION_CALL operation
    #[serde(skip_serializing_if = "Option::is_none")]
    pub method_name: Option<String>,
    /// Has to be specified for FUNCTION_CALL operation
    #[serde(skip_serializing_if = "Option::is_none")]
    pub args: Option<String>,
}
/// PublicKey contains a public key byte array for a particular CurveType
/// encoded in hex. Note that there is no PrivateKey struct as this is NEVER the
/// concern of an implementation.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct PublicKey {
    /// Hex-encoded public key bytes in the format specified by the CurveType.
    pub hex_bytes: Option<String>,
    pub curve_type: CurveType,
}
/// CurveType is the type of cryptographic curve associated with a PublicKey.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum CurveType {
    /// `y (255-bits) || x-sign-bit (1-bit)` - `32 bytes` (<https://ed25519.cr.yp.to/ed25519-20110926.pdf>)
    Edwards25519,
    /// SEC compressed - `33 bytes` (<https://secg.org/sec1-v2.pdf#subsubsection.2.3.3>)
    Secp256k1,
}
/// Operations contain all balance-changing information within a transaction.
/// They are always one-sided (only affect 1 AccountIdentifier) and can
/// succeed or fail independently from a Transaction.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    pub operation_identifier: OperationIdentifier,
    /// Restrict referenced related_operations to identifier indexes < the
    /// current operation_identifier.index. This ensures there exists a clear
    /// DAG-structure of relations. Since operations are one-sided, one could
    /// imagine relating operations in a single transfer or linking operations
    /// in a call tree.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub related_operations: Option<Vec<OperationIdentifier>>,
    /// The network-specific type of the operation. Ensure that any type that
    /// can be returned here is also specified in the NetworkStatus. This can
    /// be very useful to downstream consumers that parse all block data.
    #[serde(rename = "type")]
    pub type_: OperationType,
    /// The network-specific status of the operation. Status is not defined on
    /// the transaction object because blockchains with smart contracts may have
    /// transactions that partially apply. Blockchains with atomic transactions
    /// (all operations succeed or all operations fail) will have the same
    /// status for each operation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<OperationStatusKind>,
    pub account: AccountIdentifier,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub amount: Option<Amount>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<OperationMetadata>,
}
/// The operation_identifier uniquely identifies an operation within a
/// transaction.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct OperationIdentifier {
    /// The operation index is used to ensure each operation has a unique
    /// identifier within a transaction. This index is only relative to the
    /// transaction and NOT GLOBAL. The operations in each transaction should
    /// start from index 0. To clarify, there may not be any notion of an
    /// operation index in the blockchain being described.
    pub index: u32,
    /// Some blockchains specify an operation index that is essential for
    /// client use. For example, Bitcoin uses a network_index to identify
    /// which UTXO was used in a transaction. network_index should not be
    /// populated if there is no notion of an operation index in a blockchain
    /// (typically most account-based blockchains).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub network_index: Option<i64>,
}
/// Network-specific status an operation can take; only success is modeled.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize, strum::EnumIter)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum OperationStatusKind {
    Success,
}
/// The account_identifier uniquely identifies an account within a network. All
/// fields in the account_identifier are utilized to determine this uniqueness
/// (including the metadata field, if populated).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct AccountIdentifier {
/// The address may be a cryptographic public key (or some encoding of it)
/// or a provided username.
pub address: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub sub_account: Option<SubAccountIdentifier>,
/* Rosetta Spec also optionally provides:
*
* /// Blockchains that utilize a username model (where the address is not a
* /// derivative of a cryptographic public key) should specify the public
* /// key(s) owned by the address in metadata.
* #[serde(skip_serializing_if = "Option::is_none")]
* pub metadata: Option<serde_json::Value>, */
}
/// An account may have state specific to a contract address (ERC-20 token)
/// and/or a stake (delegated balance). The sub_account_identifier should
/// specify which state (if applicable) an account instantiation refers to.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct SubAccountIdentifier {
/// The SubAccount address may be a cryptographic value or some other
/// identifier (ex: bonded) that uniquely specifies a SubAccount.
pub address: SubAccount,
/* Rosetta Spec also optionally provides:
*
* /// If the SubAccount address is not sufficient to uniquely specify a
* /// SubAccount, any other identifying information can be stored here. It is
* /// important to note that two SubAccounts with identical addresses but
* /// differing metadata will not be considered equal by clients.
* #[serde(skip_serializing_if = "Option::is_none")]
* pub metadata: Option<serde_json::Value>, */
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum SubAccount {
LiquidBalanceForStorage,
Locked,
}
/// Amount is some Value of a Currency. It is considered invalid to specify a
/// Value without a Currency.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Amount {
/// Value of the transaction in atomic units represented as an
/// arbitrary-sized signed integer. For example, 1 BTC would be represented
/// by a value of 100000000.
pub value: u128,
pub currency: Currency,
/* Rosetta Spec also optionally provides:
*
* #[serde(skip_serializing_if = "Option::is_none")]
* pub metadata: Option<serde_json::Value>, */
}
/// Currency is composed of a canonical Symbol and Decimals. This Decimals value
/// is used to convert an Amount.Value from atomic units (Satoshis) to standard
/// units (Bitcoins).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Currency {
    /// Canonical symbol associated with a currency.
    pub symbol: String,
    /// Number of decimal places in the standard unit representation of the
    /// amount. For example, BTC has 8 decimals. Note that it is not possible
    /// to represent the value of some currency in atomic units that is not base
    /// 10.
    pub decimals: u32,
    /// Any additional information related to the currency itself; for assets
    /// this carries class/identifier/standard info (see [`CurrencyMetadata`]).
    /// Omitted from serialization when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<CurrencyMetadata>,
}
/// Asset standard tag for a currency's metadata. `Sip09`/`Sip10` presumably
/// refer to the Stacks SIP-009 (NFT) / SIP-010 (fungible token) standards —
/// NOTE(review): confirm — while `None` marks a plain currency.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum CurrencyStandard {
    Sip09,
    Sip10,
    None,
}
/// Extra identification for asset-backed currencies.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CurrencyMetadata {
    // Identifies the asset class; exact format depends on the chain —
    // NOTE(review): confirm against the code that populates this struct.
    pub asset_class_identifier: String,
    // Optional identifier of the specific asset within the class.
    pub asset_identifier: Option<String>,
    // Which token standard (if any) the asset conforms to.
    pub standard: CurrencyStandard,
}
/// Notification describing how the set of known block headers changed:
/// either new headers were appended, or a reorg rolled some back and applied
/// replacements.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Serialize)]
pub enum BlockchainEvent {
    BlockchainUpdatedWithHeaders(BlockchainUpdatedWithHeaders),
    BlockchainUpdatedWithReorg(BlockchainUpdatedWithReorg),
}
/// Payload for a straightforward chain advance: headers newly appended to the
/// tip, plus headers now considered confirmed.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct BlockchainUpdatedWithHeaders {
    pub new_headers: Vec<BlockHeader>,
    pub confirmed_headers: Vec<BlockHeader>,
}
/// Payload for a reorganization: headers to roll back from the old fork,
/// headers to apply from the new fork, plus headers now considered confirmed.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct BlockchainUpdatedWithReorg {
    pub headers_to_rollback: Vec<BlockHeader>,
    pub headers_to_apply: Vec<BlockHeader>,
    pub confirmed_headers: Vec<BlockHeader>,
}
/// Minimal block header view: a block's identifier and its parent's,
/// enough to track chain topology without full block data.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct BlockHeader {
    pub block_identifier: BlockIdentifier,
    pub parent_block_identifier: BlockIdentifier,
}
/// Like [`BlockchainEvent`], but carrying full Bitcoin block data instead of
/// bare headers.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Serialize)]
pub enum BitcoinChainEvent {
    ChainUpdatedWithBlocks(BitcoinChainUpdatedWithBlocksData),
    ChainUpdatedWithReorg(BitcoinChainUpdatedWithReorgData),
}
/// Full-block payload for a chain advance: blocks newly appended to the tip,
/// plus blocks now considered confirmed.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct BitcoinChainUpdatedWithBlocksData {
    pub new_blocks: Vec<BitcoinBlockData>,
    pub confirmed_blocks: Vec<BitcoinBlockData>,
}
/// Full-block payload for a reorganization: blocks to roll back, blocks to
/// apply in their place, plus blocks now considered confirmed.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct BitcoinChainUpdatedWithReorgData {
    pub blocks_to_rollback: Vec<BitcoinBlockData>,
    pub blocks_to_apply: Vec<BitcoinBlockData>,
    pub confirmed_blocks: Vec<BitcoinBlockData>,
}
/// The Bitcoin network a node operates on. Serialized in lowercase snake_case
/// (`mainnet`, `testnet`, `regtest`, `signet`); see the inherent
/// `from_str`/`as_str` pair below for the string round-trip.
#[allow(dead_code)]
#[derive(
    Debug, PartialEq, Eq, Clone, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema,
)]
#[serde(rename_all = "snake_case")]
pub enum BitcoinNetwork {
    Regtest,
    Testnet,
    Signet,
    Mainnet,
}
impl std::fmt::Display for BitcoinNetwork {
    /// Formats the network using its canonical lowercase name, delegating to
    /// [`BitcoinNetwork::as_str`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
impl BitcoinNetwork {
    /// Parses a lowercase network name, returning an error message listing
    /// the supported networks on anything unrecognized.
    ///
    /// NOTE: this inherent `from_str` shadows the `std::str::FromStr`
    /// convention; it is kept as-is for caller compatibility.
    pub fn from_str(network: &str) -> Result<BitcoinNetwork, String> {
        match network {
            "regtest" => Ok(BitcoinNetwork::Regtest),
            "testnet" => Ok(BitcoinNetwork::Testnet),
            "mainnet" => Ok(BitcoinNetwork::Mainnet),
            "signet" => Ok(BitcoinNetwork::Signet),
            unsupported => Err(format!(
                "network '{}' unsupported (mainnet, testnet, regtest, signet)",
                unsupported
            )),
        }
    }

    /// Canonical lowercase name for this network; the inverse of `from_str`.
    pub fn as_str(&self) -> &str {
        match self {
            BitcoinNetwork::Regtest => "regtest",
            BitcoinNetwork::Testnet => "testnet",
            BitcoinNetwork::Signet => "signet",
            BitcoinNetwork::Mainnet => "mainnet",
        }
    }
}
/// How new Bitcoin blocks are signaled to this process. Only bitcoind's
/// ZeroMQ interface remains after the predicate-code removal; the variant's
/// `String` presumably holds the ZMQ endpoint — NOTE(review): confirm.
#[derive(Deserialize, Debug, Clone, PartialEq)]
pub enum BitcoinBlockSignaling {
    ZeroMQ(String),
}
impl BitcoinBlockSignaling {
    /// Returns whether bitcoind ZeroMQ block notifications are expected for
    /// this signaling mode.
    ///
    /// Previously this was a `match &self { _ => false }` — a wildcard-only
    /// match that covered every variant — so it is now written as the plain
    /// constant it always was. The method is kept (rather than removed) so
    /// callers continue to compile if new signaling variants appear.
    ///
    /// NOTE(review): returning `false` even for the `ZeroMQ` variant looks
    /// suspicious given the method name — confirm this is intentional.
    pub fn is_bitcoind_zmq_block_signaling_expected(&self) -> bool {
        false
    }
}

13
components/ord/Cargo.toml Normal file
View File

@@ -0,0 +1,13 @@
# Manifest for the manually imported ord code (see README.md in this
# directory). Version tracks the upstream ordinals/ord release it was
# imported from.
[package]
name = "ord"
version = "0.22.2"
edition = "2021"

[dependencies]
anyhow = { version = "1.0.56", features = ["backtrace"] }
# Version pinned by the workspace root Cargo.toml.
bitcoin = { workspace = true }
chainhook-sdk = { path = "../chainhook-sdk" }
# CBOR (de)serialization, used for inscription metadata.
ciborium = "0.2.1"
serde = "1"
serde_derive = "1"
serde_json = "1"

1
components/ord/README.md Normal file
View File

@@ -0,0 +1 @@
This code is manually imported from [ordinals/ord](https://github.com/ordinals/ord) and is used for all ordinal inscription parsing.

153
components/ord/src/chain.rs Normal file
View File

@@ -0,0 +1,153 @@
/// Bitcoin network flavor as understood by ord. Serialized in kebab-case
/// (e.g. `testnet4`); defaults to mainnet.
#[derive(Default, Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum Chain {
    #[default]
    Mainnet,
    Regtest,
    Signet,
    Testnet,
    Testnet4,
}
impl Chain {
    // Commented-out code below is retained verbatim from upstream ord for
    // ease of future re-imports; it depends on items not brought over.

    // pub(crate) fn network(self) -> Network {
    //     self.into()
    // }

    // pub(crate) fn bech32_hrp(self) -> KnownHrp {
    //     match self {
    //         Self::Mainnet => KnownHrp::Mainnet,
    //         Self::Regtest => KnownHrp::Regtest,
    //         Self::Signet | Self::Testnet | Self::Testnet4 => KnownHrp::Testnets,
    //     }
    // }

    // pub(crate) fn default_rpc_port(self) -> u16 {
    //     match self {
    //         Self::Mainnet => 8332,
    //         Self::Regtest => 18443,
    //         Self::Signet => 38332,
    //         Self::Testnet => 18332,
    //         Self::Testnet4 => 48332,
    //     }
    // }

    /// Maximum inscription body size enforced on this chain: unlimited on
    /// mainnet and regtest, 1024 bytes on the test networks.
    pub(crate) fn inscription_content_size_limit(self) -> Option<usize> {
        match self {
            Self::Mainnet | Self::Regtest => None,
            Self::Testnet | Self::Testnet4 | Self::Signet => Some(1024),
        }
    }

    /// Height of the first block that can contain an inscription on this
    /// chain; indexing can skip everything below it.
    pub(crate) fn first_inscription_height(self) -> u32 {
        match self {
            Self::Mainnet => 767430,
            Self::Regtest => 0,
            Self::Signet => 112402,
            Self::Testnet => 2413343,
            Self::Testnet4 => 0,
        }
    }

    // pub(crate) fn first_rune_height(self) -> u32 {
    //     Rune::first_rune_height(self.into())
    // }

    /// Activation height of ord's "jubilee" rule change on this chain.
    /// NOTE(review): semantics (previously-cursed inscription types becoming
    /// blessed) inferred from upstream ord naming — confirm against
    /// ordinals/ord before relying on it.
    pub(crate) fn jubilee_height(self) -> u32 {
        match self {
            Self::Mainnet => 824544,
            Self::Regtest => 110,
            Self::Signet => 175392,
            Self::Testnet => 2544192,
            Self::Testnet4 => 0,
        }
    }

    // pub(crate) fn genesis_block(self) -> Block {
    //     chainhook_sdk::bitcoin::blockdata::constants::genesis_block(self.network())
    // }

    // pub(crate) fn genesis_coinbase_outpoint(self) -> OutPoint {
    //     OutPoint {
    //         txid: self.genesis_block().coinbase().unwrap().compute_txid(),
    //         vout: 0,
    //     }
    // }

    // pub(crate) fn address_from_script(self, script: &Script) -> Result<Address, SnafuError> {
    //     Address::from_script(script, self.network()).snafu_context(error::AddressConversion)
    // }

    // pub(crate) fn join_with_data_dir(self, data_dir: impl AsRef<Path>) -> PathBuf {
    //     match self {
    //         Self::Mainnet => data_dir.as_ref().to_owned(),
    //         Self::Regtest => data_dir.as_ref().join("regtest"),
    //         Self::Signet => data_dir.as_ref().join("signet"),
    //         Self::Testnet => data_dir.as_ref().join("testnet3"),
    //         Self::Testnet4 => data_dir.as_ref().join("testnet4"),
    //     }
    // }
}
// impl From<Chain> for Network {
// fn from(chain: Chain) -> Network {
// match chain {
// Chain::Mainnet => Network::Bitcoin,
// Chain::Regtest => Network::Regtest,
// Chain::Signet => Network::Signet,
// Chain::Testnet => Network::Testnet,
// Chain::Testnet4 => Network::Testnet4,
// }
// }
// }
// impl Display for Chain {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(
// f,
// "{}",
// match self {
// Self::Mainnet => "mainnet",
// Self::Regtest => "regtest",
// Self::Signet => "signet",
// Self::Testnet => "testnet",
// Self::Testnet4 => "testnet4",
// }
// )
// }
// }
// impl FromStr for Chain {
// type Err = SnafuError;
// fn from_str(s: &str) -> Result<Self, Self::Err> {
// match s {
// "mainnet" => Ok(Self::Mainnet),
// "regtest" => Ok(Self::Regtest),
// "signet" => Ok(Self::Signet),
// "testnet" => Ok(Self::Testnet),
// "testnet4" => Ok(Self::Testnet4),
// _ => Err(SnafuError::InvalidChain {
// chain: s.to_string(),
// }),
// }
// }
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn from_str() {
// assert_eq!("mainnet".parse::<Chain>().unwrap(), Chain::Mainnet);
// assert_eq!("regtest".parse::<Chain>().unwrap(), Chain::Regtest);
// assert_eq!("signet".parse::<Chain>().unwrap(), Chain::Signet);
// assert_eq!("testnet".parse::<Chain>().unwrap(), Chain::Testnet);
// assert_eq!("testnet4".parse::<Chain>().unwrap(), Chain::Testnet4);
// assert_eq!(
// "foo".parse::<Chain>().unwrap_err().to_string(),
// "Invalid chain `foo`"
// );
// }
// }

167
components/ord/src/charm.rs Normal file
View File

@@ -0,0 +1,167 @@
use std::{
fmt::{self, Display, Formatter},
str::FromStr,
};
/// Badges ("charms") that ord attaches to inscriptions and sats. Each charm
/// occupies one bit of a `u16` bitfield; the bit position is the variant's
/// discriminant.
///
/// `Eq` is derived alongside `PartialEq` because equality is total for this
/// C-like enum (clippy: `derive_partial_eq_without_eq`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Charm {
    Coin = 0,
    Cursed = 1,
    Epic = 2,
    Legendary = 3,
    Lost = 4,
    Nineball = 5,
    Rare = 6,
    Reinscription = 7,
    Unbound = 8,
    Uncommon = 9,
    Vindicated = 10,
    Mythic = 11,
    Burned = 12,
    Palindrome = 13,
}

impl Charm {
    /// Every charm, in display order (not discriminant order); [`Self::charms`]
    /// reports set charms in this order.
    pub const ALL: [Self; 14] = [
        Self::Coin,
        Self::Uncommon,
        Self::Rare,
        Self::Epic,
        Self::Legendary,
        Self::Mythic,
        Self::Nineball,
        Self::Palindrome,
        Self::Reinscription,
        Self::Cursed,
        Self::Unbound,
        Self::Lost,
        Self::Vindicated,
        Self::Burned,
    ];

    /// Bit mask for this charm within a `u16` charm set.
    pub fn flag(self) -> u16 {
        1 << self as u16
    }

    /// Sets this charm's bit in `charms`.
    pub fn set(self, charms: &mut u16) {
        *charms |= self.flag();
    }

    /// Whether this charm's bit is set in `charms`.
    pub fn is_set(self, charms: u16) -> bool {
        charms & self.flag() != 0
    }

    /// Returns `charms` with this charm's bit cleared.
    pub fn unset(self, charms: u16) -> u16 {
        charms & !self.flag()
    }

    /// Emoji used to render this charm in UIs.
    pub fn icon(self) -> &'static str {
        match self {
            Self::Burned => "🔥",
            Self::Coin => "🪙",
            Self::Cursed => "👹",
            Self::Epic => "🪻",
            Self::Legendary => "🌝",
            Self::Lost => "🤔",
            Self::Mythic => "🎃",
            Self::Nineball => "\u{39}\u{fe0f}\u{20e3}",
            Self::Palindrome => "🦋",
            Self::Rare => "🧿",
            Self::Reinscription => "♻️",
            Self::Unbound => "🔓",
            Self::Uncommon => "🌱",
            Self::Vindicated => "\u{2764}\u{fe0f}\u{200d}\u{1f525}",
        }
    }

    /// Decodes a bitfield into the list of set charms, in [`Self::ALL`] order.
    pub fn charms(charms: u16) -> Vec<Charm> {
        Self::ALL
            .into_iter()
            .filter(|charm| charm.is_set(charms))
            .collect()
    }
}

impl Display for Charm {
    /// Lowercase name of the charm; the inverse of the `FromStr` impl below.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Self::Burned => "burned",
                Self::Coin => "coin",
                Self::Cursed => "cursed",
                Self::Epic => "epic",
                Self::Legendary => "legendary",
                Self::Lost => "lost",
                Self::Mythic => "mythic",
                Self::Nineball => "nineball",
                Self::Palindrome => "palindrome",
                Self::Rare => "rare",
                Self::Reinscription => "reinscription",
                Self::Unbound => "unbound",
                Self::Uncommon => "uncommon",
                Self::Vindicated => "vindicated",
            }
        )
    }
}

impl FromStr for Charm {
    type Err = String;

    /// Parses a lowercase charm name, erroring on anything unrecognized.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "burned" => Self::Burned,
            "coin" => Self::Coin,
            "cursed" => Self::Cursed,
            "epic" => Self::Epic,
            "legendary" => Self::Legendary,
            "lost" => Self::Lost,
            "mythic" => Self::Mythic,
            "nineball" => Self::Nineball,
            "palindrome" => Self::Palindrome,
            "rare" => Self::Rare,
            "reinscription" => Self::Reinscription,
            "unbound" => Self::Unbound,
            "uncommon" => Self::Uncommon,
            "vindicated" => Self::Vindicated,
            _ => return Err(format!("invalid charm `{s}`")),
        })
    }
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn flag() {
// assert_eq!(Charm::Coin.flag(), 0b1);
// assert_eq!(Charm::Cursed.flag(), 0b10);
// }
// #[test]
// fn set() {
// let mut flags = 0;
// assert!(!Charm::Coin.is_set(flags));
// Charm::Coin.set(&mut flags);
// assert!(Charm::Coin.is_set(flags));
// }
// #[test]
// fn unset() {
// let mut flags = 0;
// Charm::Coin.set(&mut flags);
// assert!(Charm::Coin.is_set(flags));
// let flags = Charm::Coin.unset(flags);
// assert!(!Charm::Coin.is_set(flags));
// }
// #[test]
// fn from_str() {
// for charm in Charm::ALL {
// assert_eq!(charm.to_string().parse::<Charm>().unwrap(), charm);
// }
// }
// }

View File

@@ -0,0 +1,52 @@
use super::{height::Height, sat::Sat};
/// A sat position in "decimal" notation: the height of the block in which the
/// sat was mined, plus the sat's offset within that block's subsidy.
#[derive(PartialEq, Debug)]
pub struct DecimalSat {
    pub height: Height,
    pub offset: u64,
}
impl From<Sat> for DecimalSat {
    fn from(sat: Sat) -> Self {
        Self {
            height: sat.height(),
            // `third()` is taken as the sat's offset within its block's
            // subsidy — NOTE(review): confirm against ord's Sat docs.
            offset: sat.third(),
        }
    }
}
// impl Display for DecimalSat {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(f, "{}.{}", self.height, self.offset)
// }
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn decimal() {
// assert_eq!(
// Sat(0).decimal(),
// DecimalSat {
// height: Height(0),
// offset: 0
// }
// );
// assert_eq!(
// Sat(1).decimal(),
// DecimalSat {
// height: Height(0),
// offset: 1
// }
// );
// assert_eq!(
// Sat(2099999997689999).decimal(),
// DecimalSat {
// height: Height(6929999),
// offset: 0
// }
// );
// }
// }

View File

@@ -1,35 +1,33 @@
use sat::Sat;
use super::*;
use super::{sat::Sat, *};
#[derive(PartialEq, Debug)]
pub struct Degree {
pub hour: u32,
pub minute: u32,
pub second: u32,
pub third: u64,
pub hour: u32,
pub minute: u32,
pub second: u32,
pub third: u64,
}
// impl Display for Degree {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(
// f,
// "{}°{}{}″{}‴",
// self.hour, self.minute, self.second, self.third
// )
// }
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(
// f,
// "{}°{}{}″{}‴",
// self.hour, self.minute, self.second, self.third
// )
// }
// }
impl From<Sat> for Degree {
fn from(sat: Sat) -> Self {
let height = sat.height().n();
Degree {
hour: (height / (CYCLE_EPOCHS * SUBSIDY_HALVING_INTERVAL)) as u32,
minute: (height % SUBSIDY_HALVING_INTERVAL) as u32,
second: (height % DIFFCHANGE_INTERVAL) as u32,
third: sat.third(),
fn from(sat: Sat) -> Self {
let height = sat.height().n();
Degree {
hour: height / (CYCLE_EPOCHS * SUBSIDY_HALVING_INTERVAL),
minute: height % SUBSIDY_HALVING_INTERVAL,
second: height % DIFFCHANGE_INTERVAL,
third: sat.third(),
}
}
}
}
// #[cfg(test)]

File diff suppressed because it is too large Load Diff

242
components/ord/src/epoch.rs Normal file
View File

@@ -0,0 +1,242 @@
use super::{height::Height, sat::Sat, *};
/// Index of a Bitcoin subsidy-halving period (`SUBSIDY_HALVING_INTERVAL`
/// blocks each); epoch 33 onward has a zero subsidy.
#[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, PartialOrd)]
pub struct Epoch(pub u32);
impl Epoch {
    /// First sat of each epoch, in ascending order. The final entry is the
    /// total supply, acting as a sentinel for the post-subsidy epoch.
    pub const STARTING_SATS: [Sat; 34] = [
        Sat(0),
        Sat(1050000000000000),
        Sat(1575000000000000),
        Sat(1837500000000000),
        Sat(1968750000000000),
        Sat(2034375000000000),
        Sat(2067187500000000),
        Sat(2083593750000000),
        Sat(2091796875000000),
        Sat(2095898437500000),
        Sat(2097949218750000),
        Sat(2098974609270000),
        Sat(2099487304530000),
        Sat(2099743652160000),
        Sat(2099871825870000),
        Sat(2099935912620000),
        Sat(2099967955890000),
        Sat(2099983977420000),
        Sat(2099991988080000),
        Sat(2099995993410000),
        Sat(2099997995970000),
        Sat(2099998997250000),
        Sat(2099999497890000),
        Sat(2099999748210000),
        Sat(2099999873370000),
        Sat(2099999935950000),
        Sat(2099999967240000),
        Sat(2099999982780000),
        Sat(2099999990550000),
        Sat(2099999994330000),
        Sat(2099999996220000),
        Sat(2099999997060000),
        Sat(2099999997480000),
        Sat(Sat::SUPPLY),
    ];

    /// First epoch in which the block subsidy has halved down to zero.
    pub const FIRST_POST_SUBSIDY: Epoch = Self(33);

    /// Block subsidy, in sats, during this epoch: 50 BTC halved once per
    /// epoch, and zero from epoch 33 onward.
    pub fn subsidy(self) -> u64 {
        if self < Self::FIRST_POST_SUBSIDY {
            (50 * COIN_VALUE) >> self.0
        } else {
            0
        }
    }

    /// First sat of this epoch; indices past the table clamp to total supply.
    pub fn starting_sat(self) -> Sat {
        *Self::STARTING_SATS
            .get(usize::try_from(self.0).unwrap())
            .unwrap_or_else(|| Self::STARTING_SATS.last().unwrap())
    }

    /// First block height of this epoch.
    pub fn starting_height(self) -> Height {
        Height(self.0 * SUBSIDY_HALVING_INTERVAL)
    }
}
/// Allows comparing an `Epoch` directly against a bare `u32` index (used by
/// the commented-out upstream tests).
impl PartialEq<u32> for Epoch {
    fn eq(&self, other: &u32) -> bool {
        self.0 == *other
    }
}
impl From<Sat> for Epoch {
    /// Maps a sat to the halving epoch that contains it.
    ///
    /// `STARTING_SATS` is sorted ascending, so the containing epoch is the
    /// index of the last starting sat that is `<= sat`. Sats at or beyond
    /// total supply fall into the final, post-subsidy epoch (33), exactly as
    /// the previous 34-branch `if`/`else` chain computed.
    fn from(sat: Sat) -> Self {
        // `partition_point` counts starting sats <= `sat`; subtracting one
        // yields the epoch index. STARTING_SATS[0] is Sat(0) and Sat is
        // unsigned, so the count is always >= 1 and cannot underflow; the
        // count is at most 34, so the cast to u32 is lossless.
        let index = Self::STARTING_SATS.partition_point(|start| *start <= sat);
        Epoch((index - 1) as u32)
    }
}
impl From<Height> for Epoch {
    /// A new epoch begins every `SUBSIDY_HALVING_INTERVAL` blocks.
    fn from(height: Height) -> Self {
        Self(height.0 / SUBSIDY_HALVING_INTERVAL)
    }
}
// #[cfg(test)]
// mod tests {
// use super::super::*;
// #[test]
// fn starting_sat() {
// assert_eq!(Epoch(0).starting_sat(), 0);
// assert_eq!(
// Epoch(1).starting_sat(),
// Epoch(0).subsidy() * u64::from(SUBSIDY_HALVING_INTERVAL)
// );
// assert_eq!(
// Epoch(2).starting_sat(),
// (Epoch(0).subsidy() + Epoch(1).subsidy()) * u64::from(SUBSIDY_HALVING_INTERVAL)
// );
// assert_eq!(Epoch(33).starting_sat(), Sat(Sat::SUPPLY));
// assert_eq!(Epoch(34).starting_sat(), Sat(Sat::SUPPLY));
// }
// #[test]
// fn starting_sats() {
// let mut sat = 0;
// let mut epoch_sats = Vec::new();
// for epoch in 0..34 {
// epoch_sats.push(sat);
// sat += u64::from(SUBSIDY_HALVING_INTERVAL) * Epoch(epoch).subsidy();
// }
// assert_eq!(Epoch::STARTING_SATS.as_slice(), epoch_sats);
// assert_eq!(Epoch::STARTING_SATS.len(), 34);
// }
// #[test]
// fn subsidy() {
// assert_eq!(Epoch(0).subsidy(), 5000000000);
// assert_eq!(Epoch(1).subsidy(), 2500000000);
// assert_eq!(Epoch(32).subsidy(), 1);
// assert_eq!(Epoch(33).subsidy(), 0);
// }
// #[test]
// fn starting_height() {
// assert_eq!(Epoch(0).starting_height(), 0);
// assert_eq!(Epoch(1).starting_height(), SUBSIDY_HALVING_INTERVAL);
// assert_eq!(Epoch(2).starting_height(), SUBSIDY_HALVING_INTERVAL * 2);
// }
// #[test]
// fn from_height() {
// assert_eq!(Epoch::from(Height(0)), 0);
// assert_eq!(Epoch::from(Height(SUBSIDY_HALVING_INTERVAL)), 1);
// assert_eq!(Epoch::from(Height(SUBSIDY_HALVING_INTERVAL) + 1), 1);
// }
// #[test]
// fn from_sat() {
// for (epoch, starting_sat) in Epoch::STARTING_SATS.into_iter().enumerate() {
// if epoch > 0 {
// assert_eq!(
// Epoch::from(Sat(starting_sat.n() - 1)),
// Epoch(u32::try_from(epoch).unwrap() - 1)
// );
// }
// assert_eq!(
// Epoch::from(starting_sat),
// Epoch(u32::try_from(epoch).unwrap())
// );
// assert_eq!(
// Epoch::from(starting_sat + 1),
// Epoch(u32::try_from(epoch).unwrap())
// );
// }
// assert_eq!(Epoch::from(Sat(0)), 0);
// assert_eq!(Epoch::from(Sat(1)), 0);
// assert_eq!(Epoch::from(Epoch(1).starting_sat()), 1);
// assert_eq!(Epoch::from(Epoch(1).starting_sat() + 1), 1);
// assert_eq!(Epoch::from(Sat(u64::MAX)), 33);
// }
// #[test]
// fn eq() {
// assert_eq!(Epoch(0), 0);
// assert_eq!(Epoch(100), 100);
// }
// #[test]
// fn first_post_subsidy() {
// assert_eq!(Epoch::FIRST_POST_SUBSIDY.subsidy(), 0);
// assert!(Epoch(Epoch::FIRST_POST_SUBSIDY.0 - 1).subsidy() > 0);
// }
// }

View File

@@ -0,0 +1,124 @@
use std::ops::Add;
use super::{epoch::Epoch, sat::Sat, *};
/// A block height, wrapping a `u32`.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Serialize)]
pub struct Height(pub u32);
impl Height {
    /// The raw block height.
    pub fn n(self) -> u32 {
        self.0
    }

    /// Block subsidy, in sats, for a block at this height.
    pub fn subsidy(self) -> u64 {
        Epoch::from(self).subsidy()
    }

    /// First sat mined at this height: the epoch's starting sat plus one full
    /// subsidy for every block since the epoch began.
    pub fn starting_sat(self) -> Sat {
        let epoch = Epoch::from(self);
        let epoch_starting_sat = epoch.starting_sat();
        let epoch_starting_height = epoch.starting_height();
        epoch_starting_sat + u64::from(self.n() - epoch_starting_height.n()) * epoch.subsidy()
    }

    /// Offset of this height within its difficulty-adjustment period of
    /// `DIFFCHANGE_INTERVAL` blocks.
    pub fn period_offset(self) -> u32 {
        self.0 % DIFFCHANGE_INTERVAL
    }
}
impl Add<u32> for Height {
    type Output = Self;

    /// Advances the height by `other` blocks; overflows like plain `u32`
    /// addition (panic in debug builds, wrap in release).
    fn add(self, other: u32) -> Height {
        Self(self.0 + other)
    }
}
// impl Sub<u32> for Height {
// type Output = Self;
// fn sub(self, other: u32) -> Height {
// Self(self.0 - other)
// }
// }
// impl PartialEq<u32> for Height {
// fn eq(&self, other: &u32) -> bool {
// self.0 == *other
// }
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn n() {
// assert_eq!(Height(0).n(), 0);
// assert_eq!(Height(1).n(), 1);
// }
// #[test]
// fn add() {
// assert_eq!(Height(0) + 1, 1);
// assert_eq!(Height(1) + 100, 101);
// }
// #[test]
// fn sub() {
// assert_eq!(Height(1) - 1, 0);
// assert_eq!(Height(100) - 50, 50);
// }
// #[test]
// fn eq() {
// assert_eq!(Height(0), 0);
// assert_eq!(Height(100), 100);
// }
// #[test]
// fn from_str() {
// assert_eq!("0".parse::<Height>().unwrap(), 0);
// assert!("foo".parse::<Height>().is_err());
// }
// #[test]
// fn subsidy() {
// assert_eq!(Height(0).subsidy(), 5000000000);
// assert_eq!(Height(1).subsidy(), 5000000000);
// assert_eq!(Height(SUBSIDY_HALVING_INTERVAL - 1).subsidy(), 5000000000);
// assert_eq!(Height(SUBSIDY_HALVING_INTERVAL).subsidy(), 2500000000);
// assert_eq!(Height(SUBSIDY_HALVING_INTERVAL + 1).subsidy(), 2500000000);
// }
// #[test]
// fn starting_sat() {
// assert_eq!(Height(0).starting_sat(), 0);
// assert_eq!(Height(1).starting_sat(), 5000000000);
// assert_eq!(
// Height(SUBSIDY_HALVING_INTERVAL - 1).starting_sat(),
// (u64::from(SUBSIDY_HALVING_INTERVAL) - 1) * 5000000000
// );
// assert_eq!(
// Height(SUBSIDY_HALVING_INTERVAL).starting_sat(),
// u64::from(SUBSIDY_HALVING_INTERVAL) * 5000000000
// );
// assert_eq!(
// Height(SUBSIDY_HALVING_INTERVAL + 1).starting_sat(),
// u64::from(SUBSIDY_HALVING_INTERVAL) * 5000000000 + 2500000000
// );
// assert_eq!(
// Height(u32::MAX).starting_sat(),
// *Epoch::STARTING_SATS.last().unwrap()
// );
// }
// #[test]
// fn period_offset() {
// assert_eq!(Height(0).period_offset(), 0);
// assert_eq!(Height(1).period_offset(), 1);
// assert_eq!(Height(DIFFCHANGE_INTERVAL - 1).period_offset(), 2015);
// assert_eq!(Height(DIFFCHANGE_INTERVAL).period_offset(), 0);
// assert_eq!(Height(DIFFCHANGE_INTERVAL + 1).period_offset(), 1);
// }
// }

View File

@@ -0,0 +1,904 @@
use {
super::{inscription_id::InscriptionId, media::Media, tag::Tag, *},
bitcoin::{constants::MAX_SCRIPT_ELEMENT_SIZE, hashes::Hash, opcodes, script, ScriptBuf, Txid},
ciborium::Value,
std::{io::Cursor, str},
};
/// A parsed inscription envelope. Each field holds the raw bytes of the
/// corresponding envelope tag; the accessor methods on the impl decode them.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq, Default)]
pub struct Inscription {
    /// Raw inscription content, if any.
    pub body: Option<Vec<u8>>,
    /// Raw `content-encoding` value (e.g. `br` for brotli-compressed bodies).
    pub content_encoding: Option<Vec<u8>>,
    /// Raw MIME type bytes; decoded by `content_type()`.
    pub content_type: Option<Vec<u8>>,
    /// Serialized id of the inscription this one delegates its content to
    /// (32-byte txid plus optional little-endian index).
    pub delegate: Option<Vec<u8>>,
    // Parser validity flags — NOTE(review): these are set by the envelope
    // parser outside this file; confirm exact semantics there.
    pub duplicate_field: bool,
    pub incomplete_field: bool,
    /// CBOR-encoded metadata; decoded by `metadata()`.
    pub metadata: Option<Vec<u8>>,
    /// Raw metaprotocol identifier bytes; decoded by `metaprotocol()`.
    pub metaprotocol: Option<Vec<u8>>,
    /// Serialized parent inscription ids; decoded by `parents()`.
    pub parents: Vec<Vec<u8>>,
    /// Little-endian pointer value; decoded by `pointer()`.
    pub pointer: Option<Vec<u8>>,
    /// Rune commitment bytes.
    pub rune: Option<Vec<u8>>,
    /// Set when the envelope contained an unrecognized even-numbered tag —
    /// NOTE(review): set by the parser; confirm.
    pub unrecognized_even_field: bool,
}
impl Inscription {
    // Commented-out code below is retained verbatim from upstream ord for
    // ease of future re-imports; it depends on items not brought over.

    // pub fn new(
    //     chain: Chain,
    //     compress: bool,
    //     delegate: Option<InscriptionId>,
    //     metadata: Option<Vec<u8>>,
    //     metaprotocol: Option<String>,
    //     parents: Vec<InscriptionId>,
    //     path: Option<PathBuf>,
    //     pointer: Option<u64>,
    //     rune: Option<Rune>,
    // ) -> Result<Self, Error> {
    //     let path = path.as_ref();
    //     let (body, content_type, content_encoding) = if let Some(path) = path {
    //         let body = fs::read(path).with_context(|| format!("io error reading {}", path.display()))?;
    //         let content_type = Media::content_type_for_path(path)?.0;
    //         let (body, content_encoding) = if compress {
    //             let compression_mode = Media::content_type_for_path(path)?.1;
    //             let mut compressed = Vec::new();
    //             {
    //                 CompressorWriter::with_params(
    //                     &mut compressed,
    //                     body.len(),
    //                     &BrotliEncoderParams {
    //                         lgblock: 24,
    //                         lgwin: 24,
    //                         mode: compression_mode,
    //                         quality: 11,
    //                         size_hint: body.len(),
    //                         ..default()
    //                     },
    //                 )
    //                 .write_all(&body)?;
    //                 let mut decompressor = brotli::Decompressor::new(compressed.as_slice(), compressed.len());
    //                 let mut decompressed = Vec::new();
    //                 decompressor.read_to_end(&mut decompressed)?;
    //                 ensure!(decompressed == body, "decompression roundtrip failed");
    //             }
    //             if compressed.len() < body.len() {
    //                 (compressed, Some("br".as_bytes().to_vec()))
    //             } else {
    //                 (body, None)
    //             }
    //         } else {
    //             (body, None)
    //         };
    //         if let Some(limit) = chain.inscription_content_size_limit() {
    //             let len = body.len();
    //             if len > limit {
    //                 bail!("content size of {len} bytes exceeds {limit} byte limit for {chain} inscriptions");
    //             }
    //         }
    //         (Some(body), Some(content_type), content_encoding)
    //     } else {
    //         (None, None, None)
    //     };
    //     Ok(Self {
    //         body,
    //         content_encoding,
    //         content_type: content_type.map(|content_type| content_type.into()),
    //         delegate: delegate.map(|delegate| delegate.value()),
    //         metadata,
    //         metaprotocol: metaprotocol.map(|metaprotocol| metaprotocol.into_bytes()),
    //         parents: parents.iter().map(|parent| parent.value()).collect(),
    //         pointer: pointer.map(Self::pointer_value),
    //         rune: rune.map(|rune| rune.commitment()),
    //         ..default()
    //     })
    // }

    /// Encodes a pointer as minimal little-endian bytes (trailing zero bytes
    /// stripped), the wire form consumed by `pointer()`.
    pub fn pointer_value(pointer: u64) -> Vec<u8> {
        let mut bytes = pointer.to_le_bytes().to_vec();
        while bytes.last().copied() == Some(0) {
            bytes.pop();
        }
        bytes
    }

    /// Appends this inscription's reveal envelope
    /// (`OP_FALSE OP_IF <PROTOCOL_ID> … OP_ENDIF`) to `builder`: tagged
    /// fields first, then the body.
    pub fn append_reveal_script_to_builder(&self, mut builder: script::Builder) -> script::Builder {
        builder = builder
            .push_opcode(opcodes::OP_FALSE)
            .push_opcode(opcodes::all::OP_IF)
            .push_slice(envelope::PROTOCOL_ID);
        Tag::ContentType.append(&mut builder, &self.content_type);
        Tag::ContentEncoding.append(&mut builder, &self.content_encoding);
        Tag::Metaprotocol.append(&mut builder, &self.metaprotocol);
        Tag::Parent.append_array(&mut builder, &self.parents);
        Tag::Delegate.append(&mut builder, &self.delegate);
        Tag::Pointer.append(&mut builder, &self.pointer);
        Tag::Metadata.append(&mut builder, &self.metadata);
        Tag::Rune.append(&mut builder, &self.rune);
        if let Some(body) = &self.body {
            builder = builder.push_slice(envelope::BODY_TAG);
            // Script pushes are capped at MAX_SCRIPT_ELEMENT_SIZE, so the
            // body is emitted as a sequence of maximal chunks.
            for chunk in body.chunks(MAX_SCRIPT_ELEMENT_SIZE) {
                builder = builder.push_slice::<&script::PushBytes>(chunk.try_into().unwrap());
            }
        }
        builder.push_opcode(opcodes::all::OP_ENDIF)
    }

    // #[cfg(test)]
    // pub(crate) fn append_reveal_script(&self, builder: script::Builder) -> ScriptBuf {
    //     self.append_reveal_script_to_builder(builder).into_script()
    // }

    /// Appends the reveal envelopes of every inscription in `inscriptions`,
    /// in order, to `builder`.
    pub fn append_batch_reveal_script_to_builder(
        inscriptions: &[Inscription],
        mut builder: script::Builder,
    ) -> script::Builder {
        for inscription in inscriptions {
            builder = inscription.append_reveal_script_to_builder(builder);
        }
        builder
    }

    /// Builds the finished reveal script containing every inscription's
    /// envelope.
    pub fn append_batch_reveal_script(
        inscriptions: &[Inscription],
        builder: script::Builder,
    ) -> ScriptBuf {
        Inscription::append_batch_reveal_script_to_builder(inscriptions, builder).into_script()
    }

    /// Decodes an inscription id (32-byte txid followed by an optional
    /// little-endian index of up to 4 bytes) from a raw tag payload.
    /// Returns `None` for payloads that are too short, too long, or use a
    /// non-minimal variable-length index.
    fn inscription_id_field(field: Option<&[u8]>) -> Option<InscriptionId> {
        let value = field.as_ref()?;
        if value.len() < Txid::LEN {
            return None;
        }
        if value.len() > Txid::LEN + 4 {
            return None;
        }
        let (txid, index) = value.split_at(Txid::LEN);
        if let Some(last) = index.last() {
            // Accept fixed length encoding with 4 bytes (with potential trailing zeroes)
            // or variable length (no trailing zeroes)
            if index.len() != 4 && *last == 0 {
                return None;
            }
        }
        // Length was validated above, so the slice-to-Txid conversion
        // cannot fail.
        let txid = Txid::from_slice(txid).unwrap();
        let index = [
            index.first().copied().unwrap_or(0),
            index.get(1).copied().unwrap_or(0),
            index.get(2).copied().unwrap_or(0),
            index.get(3).copied().unwrap_or(0),
        ];
        let index = u32::from_le_bytes(index);
        Some(InscriptionId { txid, index })
    }

    /// Media type inferred from the content type; `Unknown` when there is no
    /// body, no content type, or the content type does not parse.
    pub fn media(&self) -> Media {
        if self.body.is_none() {
            return Media::Unknown;
        }
        let Some(content_type) = self.content_type() else {
            return Media::Unknown;
        };
        content_type.parse().unwrap_or(Media::Unknown)
    }

    /// Borrows the inscription body, if present.
    pub fn body(&self) -> Option<&[u8]> {
        Some(self.body.as_ref()?)
    }

    /// Consumes the inscription, returning its body.
    pub fn into_body(self) -> Option<Vec<u8>> {
        self.body
    }

    /// Body length in bytes, if a body is present.
    pub fn content_length(&self) -> Option<usize> {
        Some(self.body()?.len())
    }

    /// Content type as UTF-8, or `None` if absent or not valid UTF-8.
    pub fn content_type(&self) -> Option<&str> {
        str::from_utf8(self.content_type.as_ref()?).ok()
    }

    // pub fn content_encoding(&self) -> Option<HeaderValue> {
    //     HeaderValue::from_str(str::from_utf8(self.content_encoding.as_ref()?).unwrap_or_default())
    //         .ok()
    // }

    /// Id of the delegate inscription, if the delegate field decodes.
    pub fn delegate(&self) -> Option<InscriptionId> {
        Self::inscription_id_field(self.delegate.as_deref())
    }

    /// Metadata decoded from CBOR, or `None` if absent or malformed.
    pub fn metadata(&self) -> Option<Value> {
        ciborium::from_reader(Cursor::new(self.metadata.as_ref()?)).ok()
    }

    /// Metaprotocol identifier as UTF-8, if present and valid.
    pub fn metaprotocol(&self) -> Option<&str> {
        str::from_utf8(self.metaprotocol.as_ref()?).ok()
    }

    /// Parent inscription ids; entries that fail to decode are skipped.
    pub fn parents(&self) -> Vec<InscriptionId> {
        self.parents
            .iter()
            .filter_map(|parent| Self::inscription_id_field(Some(parent)))
            .collect()
    }

    /// Pointer decoded as a little-endian u64; `None` when any byte past the
    /// eighth is non-zero (the value would overflow a u64).
    pub fn pointer(&self) -> Option<u64> {
        let value = self.pointer.as_ref()?;
        if value.iter().skip(8).copied().any(|byte| byte != 0) {
            return None;
        }
        let pointer = [
            value.first().copied().unwrap_or(0),
            value.get(1).copied().unwrap_or(0),
            value.get(2).copied().unwrap_or(0),
            value.get(3).copied().unwrap_or(0),
            value.get(4).copied().unwrap_or(0),
            value.get(5).copied().unwrap_or(0),
            value.get(6).copied().unwrap_or(0),
            value.get(7).copied().unwrap_or(0),
        ];
        Some(u64::from_le_bytes(pointer))
    }

    // #[cfg(test)]
    // pub(crate) fn to_witness(&self) -> Witness {
    //     let builder = script::Builder::new();
    //     let script = self.append_reveal_script(builder);
    //     let mut witness = Witness::new();
    //     witness.push(script);
    //     witness.push([]);
    //     witness
    // }

    // pub fn hidden(&self) -> bool {
    //     use regex::bytes::Regex;
    //     const BVM_NETWORK: &[u8] = b"<body style=\"background:#F61;color:#fff;\">\
    //         <h1 style=\"height:100%\">bvm.network</h1></body>";
    //     lazy_static! {
    //         static ref BRC_420: Regex =
    //             Regex::new(r"^\s*/content/[[:xdigit:]]{64}i\d+\s*$").unwrap();
    //     }
    //     self.body()
    //         .map(|body| BRC_420.is_match(body) || body.starts_with(BVM_NETWORK))
    //         .unwrap_or_default()
    //         || self.metaprotocol.is_some()
    //         || matches!(self.media(), Media::Code(_) | Media::Text | Media::Unknown)
    // }
}
// #[cfg(test)]
// mod tests {
// use {super::*, std::io::Write};
// #[test]
// fn reveal_script_chunks_body() {
// assert_eq!(
// inscription("foo", [])
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 7
// );
// assert_eq!(
// inscription("foo", [0; 1])
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 8
// );
// assert_eq!(
// inscription("foo", [0; 520])
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 8
// );
// assert_eq!(
// inscription("foo", [0; 521])
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 9
// );
// assert_eq!(
// inscription("foo", [0; 1040])
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 9
// );
// assert_eq!(
// inscription("foo", [0; 1041])
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 10
// );
// }
// #[test]
// fn reveal_script_chunks_metadata() {
// assert_eq!(
// Inscription {
// metadata: None,
// ..default()
// }
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 4
// );
// assert_eq!(
// Inscription {
// metadata: Some(Vec::new()),
// ..default()
// }
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 4
// );
// assert_eq!(
// Inscription {
// metadata: Some(vec![0; 1]),
// ..default()
// }
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 6
// );
// assert_eq!(
// Inscription {
// metadata: Some(vec![0; 520]),
// ..default()
// }
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 6
// );
// assert_eq!(
// Inscription {
// metadata: Some(vec![0; 521]),
// ..default()
// }
// .append_reveal_script(script::Builder::new())
// .instructions()
// .count(),
// 8
// );
// }
// #[test]
// fn inscription_with_no_parent_field_has_no_parent() {
// assert!(Inscription {
// parents: Vec::new(),
// ..default()
// }
// .parents()
// .is_empty());
// }
// #[test]
// fn inscription_with_parent_field_shorter_than_txid_length_has_no_parent() {
// assert!(Inscription {
// parents: vec![Vec::new()],
// ..default()
// }
// .parents()
// .is_empty());
// }
// #[test]
// fn inscription_with_parent_field_longer_than_txid_and_index_has_no_parent() {
// assert!(Inscription {
// parents: vec![vec![1; 37]],
// ..default()
// }
// .parents()
// .is_empty());
// }
// #[test]
// fn inscription_with_parent_field_index_with_trailing_zeroes_and_fixed_length_has_parent() {
// let mut parent = vec![1; 36];
// parent[35] = 0;
// assert!(!Inscription {
// parents: vec![parent],
// ..default()
// }
// .parents()
// .is_empty());
// }
// #[test]
// fn inscription_with_parent_field_index_with_trailing_zeroes_and_variable_length_has_no_parent() {
// let mut parent = vec![1; 35];
// parent[34] = 0;
// assert!(Inscription {
// parents: vec![parent],
// ..default()
// }
// .parents()
// .is_empty());
// }
// #[test]
// fn inscription_delegate_txid_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// delegate: Some(vec![
// 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
// 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
// 0x1e, 0x1f,
// ]),
// ..default()
// }
// .delegate()
// .unwrap()
// .txid,
// "1f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100"
// .parse()
// .unwrap()
// );
// }
// #[test]
// fn inscription_parent_txid_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// parents: vec![vec![
// 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
// 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d,
// 0x1e, 0x1f,
// ]],
// ..default()
// }
// .parents(),
// [
// "1f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100i0"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn inscription_parent_with_zero_byte_index_field_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// parents: vec![vec![1; 32]],
// ..default()
// }
// .parents(),
// [
// "0101010101010101010101010101010101010101010101010101010101010101i0"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn inscription_parent_with_one_byte_index_field_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// parents: vec![vec![
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0x01
// ]],
// ..default()
// }
// .parents(),
// [
// "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffi1"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn inscription_parent_with_two_byte_index_field_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// parents: vec![vec![
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0x01, 0x02
// ]],
// ..default()
// }
// .parents(),
// [
// "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffi513"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn inscription_parent_with_three_byte_index_field_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// parents: vec![vec![
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0x01, 0x02, 0x03
// ]],
// ..default()
// }
// .parents(),
// [
// "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffi197121"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn inscription_parent_with_four_byte_index_field_is_deserialized_correctly() {
// assert_eq!(
// Inscription {
// parents: vec![vec![
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0x01, 0x02, 0x03, 0x04,
// ]],
// ..default()
// }
// .parents(),
// [
// "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffi67305985"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn inscription_parent_returns_multiple_parents() {
// assert_eq!(
// Inscription {
// parents: vec![
// vec![
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0x01, 0x02, 0x03, 0x04,
// ],
// vec![
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
// 0xff, 0xff, 0xff, 0xff, 0x00, 0x02, 0x03, 0x04,
// ]
// ],
// ..default()
// }
// .parents(),
// [
// "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffi67305985"
// .parse()
// .unwrap(),
// "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffi67305984"
// .parse()
// .unwrap()
// ],
// );
// }
// #[test]
// fn metadata_function_decodes_metadata() {
// assert_eq!(
// Inscription {
// metadata: Some(vec![0x44, 0, 1, 2, 3]),
// ..default()
// }
// .metadata()
// .unwrap(),
// Value::Bytes(vec![0, 1, 2, 3]),
// );
// }
// #[test]
// fn metadata_function_returns_none_if_no_metadata() {
// assert_eq!(
// Inscription {
// metadata: None,
// ..default()
// }
// .metadata(),
// None,
// );
// }
// #[test]
// fn metadata_function_returns_none_if_metadata_fails_to_parse() {
// assert_eq!(
// Inscription {
// metadata: Some(vec![0x44]),
// ..default()
// }
// .metadata(),
// None,
// );
// }
// #[test]
// fn pointer_decode() {
// assert_eq!(
// Inscription {
// pointer: None,
// ..default()
// }
// .pointer(),
// None
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![0]),
// ..default()
// }
// .pointer(),
// Some(0),
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8]),
// ..default()
// }
// .pointer(),
// Some(0x0807060504030201),
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![1, 2, 3, 4, 5, 6]),
// ..default()
// }
// .pointer(),
// Some(0x0000060504030201),
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 0]),
// ..default()
// }
// .pointer(),
// Some(0x0807060504030201),
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 1]),
// ..default()
// }
// .pointer(),
// None,
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 1]),
// ..default()
// }
// .pointer(),
// None,
// );
// }
// #[test]
// fn pointer_encode() {
// assert_eq!(
// Inscription {
// pointer: None,
// ..default()
// }
// .to_witness(),
// envelope(&[b"ord"]),
// );
// assert_eq!(
// Inscription {
// pointer: Some(vec![1, 2, 3]),
// ..default()
// }
// .to_witness(),
// envelope(&[b"ord", &[2], &[1, 2, 3]]),
// );
// }
// #[test]
// fn pointer_value() {
// let mut file = tempfile::Builder::new().suffix(".txt").tempfile().unwrap();
// write!(file, "foo").unwrap();
// let inscription = Inscription::new(
// Chain::Mainnet,
// false,
// None,
// None,
// None,
// Vec::new(),
// Some(file.path().to_path_buf()),
// None,
// None,
// )
// .unwrap();
// assert_eq!(inscription.pointer, None);
// let inscription = Inscription::new(
// Chain::Mainnet,
// false,
// None,
// None,
// None,
// Vec::new(),
// Some(file.path().to_path_buf()),
// Some(0),
// None,
// )
// .unwrap();
// assert_eq!(inscription.pointer, Some(Vec::new()));
// let inscription = Inscription::new(
// Chain::Mainnet,
// false,
// None,
// None,
// None,
// Vec::new(),
// Some(file.path().to_path_buf()),
// Some(1),
// None,
// )
// .unwrap();
// assert_eq!(inscription.pointer, Some(vec![1]));
// let inscription = Inscription::new(
// Chain::Mainnet,
// false,
// None,
// None,
// None,
// Vec::new(),
// Some(file.path().to_path_buf()),
// Some(256),
// None,
// )
// .unwrap();
// assert_eq!(inscription.pointer, Some(vec![0, 1]));
// }
// #[test]
// fn hidden() {
// #[track_caller]
// fn case(content_type: Option<&str>, body: Option<&str>, expected: bool) {
// assert_eq!(
// Inscription {
// content_type: content_type.map(|content_type| content_type.as_bytes().into()),
// body: body.map(|content_type| content_type.as_bytes().into()),
// ..default()
// }
// .hidden(),
// expected
// );
// }
// case(None, None, true);
// case(Some("foo"), Some(""), true);
// case(Some("text/plain"), None, true);
// case(
// Some("text/plain"),
// Some("The fox jumped. The cow danced."),
// true,
// );
// case(Some("text/plain;charset=utf-8"), Some("foo"), true);
// case(Some("text/plain;charset=cn-big5"), Some("foo"), true);
// case(Some("application/json"), Some("foo"), true);
// case(
// Some("text/markdown"),
// Some("/content/09a8d837ec0bcaec668ecf405e696a16bee5990863659c224ff888fb6f8f45e7i0"),
// true,
// );
// case(
// Some("text/html"),
// Some("/content/09a8d837ec0bcaec668ecf405e696a16bee5990863659c224ff888fb6f8f45e7i0"),
// true,
// );
// case(Some("application/yaml"), Some(""), true);
// case(
// Some("text/html;charset=utf-8"),
// Some("/content/09a8d837ec0bcaec668ecf405e696a16bee5990863659c224ff888fb6f8f45e7i0"),
// true,
// );
// case(
// Some("text/html"),
// Some(" /content/09a8d837ec0bcaec668ecf405e696a16bee5990863659c224ff888fb6f8f45e7i0 \n"),
// true,
// );
// case(
// Some("text/html"),
// Some(
// r#"<body style="background:#F61;color:#fff;"><h1 style="height:100%">bvm.network</h1></body>"#,
// ),
// true,
// );
// case(
// Some("text/html"),
// Some(
// r#"<body style="background:#F61;color:#fff;"><h1 style="height:100%">bvm.network</h1></body>foo"#,
// ),
// true,
// );
// assert!(Inscription {
// content_type: Some("text/plain".as_bytes().into()),
// body: Some(b"{\xc3\x28}".as_slice().into()),
// ..default()
// }
// .hidden());
// assert!(Inscription {
// content_type: Some("text/html".as_bytes().into()),
// body: Some("hello".as_bytes().into()),
// metaprotocol: Some(Vec::new()),
// ..default()
// }
// .hidden());
// }
// }

View File

@@ -0,0 +1,193 @@
use std::fmt::{self, Display, Formatter};
use bitcoin::Txid;
/// Unique identifier of an inscription: the id of the transaction that
/// revealed it plus an index distinguishing multiple inscriptions revealed
/// in the same transaction. Rendered as `<txid>i<index>` (see `Display`).
#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq, PartialOrd, Ord)]
pub struct InscriptionId {
// Id of the reveal transaction.
pub txid: Txid,
// Zero-based index of the inscription within the reveal transaction.
pub index: u32,
}
// impl Default for InscriptionId {
// fn default() -> Self {
// Self {
// txid: Txid::all_zeros(),
// index: 0,
// }
// }
// }
// impl InscriptionId {
// pub(crate) fn value(self) -> Vec<u8> {
// let index = self.index.to_le_bytes();
// let mut index_slice = index.as_slice();
// while index_slice.last().copied() == Some(0) {
// index_slice = &index_slice[0..index_slice.len() - 1];
// }
// self
// .txid
// .to_byte_array()
// .iter()
// .chain(index_slice)
// .copied()
// .collect()
// }
// }
impl Display for InscriptionId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}i{}", self.txid, self.index)
}
}
// #[derive(Debug)]
// pub enum ParseError {
// Character(char),
// Length(usize),
// Separator(char),
// Txid(bitcoin::hex::HexToArrayError),
// Index(std::num::ParseIntError),
// }
// impl Display for ParseError {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// match self {
// Self::Character(c) => write!(f, "invalid character: '{c}'"),
// Self::Length(len) => write!(f, "invalid length: {len}"),
// Self::Separator(c) => write!(f, "invalid separator: `{c}`"),
// Self::Txid(err) => write!(f, "invalid txid: {err}"),
// Self::Index(err) => write!(f, "invalid index: {err}"),
// }
// }
// }
// impl std::error::Error for ParseError {}
// impl FromStr for InscriptionId {
// type Err = ParseError;
// fn from_str(s: &str) -> Result<Self, Self::Err> {
// if let Some(char) = s.chars().find(|char| !char.is_ascii()) {
// return Err(ParseError::Character(char));
// }
// const TXID_LEN: usize = 64;
// const MIN_LEN: usize = TXID_LEN + 2;
// if s.len() < MIN_LEN {
// return Err(ParseError::Length(s.len()));
// }
// let txid = &s[..TXID_LEN];
// let separator = s.chars().nth(TXID_LEN).unwrap();
// if separator != 'i' {
// return Err(ParseError::Separator(separator));
// }
// let vout = &s[TXID_LEN + 1..];
// Ok(Self {
// txid: txid.parse().map_err(ParseError::Txid)?,
// index: vout.parse().map_err(ParseError::Index)?,
// })
// }
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn display() {
// assert_eq!(
// inscription_id(1).to_string(),
// "1111111111111111111111111111111111111111111111111111111111111111i1",
// );
// assert_eq!(
// InscriptionId {
// txid: txid(1),
// index: 0,
// }
// .to_string(),
// "1111111111111111111111111111111111111111111111111111111111111111i0",
// );
// assert_eq!(
// InscriptionId {
// txid: txid(1),
// index: 0xFFFFFFFF,
// }
// .to_string(),
// "1111111111111111111111111111111111111111111111111111111111111111i4294967295",
// );
// }
// #[test]
// fn from_str() {
// assert_eq!(
// "1111111111111111111111111111111111111111111111111111111111111111i1"
// .parse::<InscriptionId>()
// .unwrap(),
// inscription_id(1),
// );
// assert_eq!(
// "1111111111111111111111111111111111111111111111111111111111111111i4294967295"
// .parse::<InscriptionId>()
// .unwrap(),
// InscriptionId {
// txid: txid(1),
// index: 0xFFFFFFFF,
// },
// );
// assert_eq!(
// "1111111111111111111111111111111111111111111111111111111111111111i4294967295"
// .parse::<InscriptionId>()
// .unwrap(),
// InscriptionId {
// txid: txid(1),
// index: 0xFFFFFFFF,
// },
// );
// }
// #[test]
// fn from_str_bad_character() {
// assert_matches!(
// "→".parse::<InscriptionId>(),
// Err(ParseError::Character('→')),
// );
// }
// #[test]
// fn from_str_bad_length() {
// assert_matches!("foo".parse::<InscriptionId>(), Err(ParseError::Length(3)));
// }
// #[test]
// fn from_str_bad_separator() {
// assert_matches!(
// "0000000000000000000000000000000000000000000000000000000000000000x0".parse::<InscriptionId>(),
// Err(ParseError::Separator('x')),
// );
// }
// #[test]
// fn from_str_bad_index() {
// assert_matches!(
// "0000000000000000000000000000000000000000000000000000000000000000ifoo"
// .parse::<InscriptionId>(),
// Err(ParseError::Index(_)),
// );
// }
// #[test]
// fn from_str_bad_txid() {
// assert_matches!(
// "x000000000000000000000000000000000000000000000000000000000000000i0".parse::<InscriptionId>(),
// Err(ParseError::Txid(_)),
// );
// }
// }

27
components/ord/src/lib.rs Normal file
View File

@@ -0,0 +1,27 @@
#![allow(dead_code)]
#![allow(unused_variables)]
#[macro_use]
extern crate serde_derive;
/// Crate-wide result alias; the error type defaults to `anyhow::Error`.
type Result<T = (), E = anyhow::Error> = std::result::Result<T, E>;
pub mod chain;
pub mod charm;
pub mod decimal_sat;
pub mod degree;
pub mod envelope;
pub mod epoch;
pub mod height;
pub mod inscription;
pub mod inscription_id;
pub mod media;
pub mod rarity;
pub mod sat;
pub mod sat_point;
pub mod tag;
/// Number of blocks between Bitcoin block-subsidy halvings.
pub const SUBSIDY_HALVING_INTERVAL: u32 = 210_000;
/// Number of blocks between Bitcoin difficulty adjustments.
pub const DIFFCHANGE_INTERVAL: u32 = 2016;
/// Number of halving epochs per ordinal-theory cycle (see `Sat::cycle`).
pub const CYCLE_EPOCHS: u32 = 6;
/// Number of satoshis in one bitcoin.
pub const COIN_VALUE: u64 = 100_000_000;

229
components/ord/src/media.rs Normal file
View File

@@ -0,0 +1,229 @@
use std::str::FromStr;
use anyhow::{anyhow, Error};
use self::{ImageRendering::*, Language::*, Media::*};
/// Classification of inscription content by MIME type; see `Media::TABLE`
/// for the full content-type mapping.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum Media {
// Playable audio (flac, mp3, opus, wav per TABLE).
Audio,
// Source code or structured text in the given language.
Code(Language),
// Web font (otf, ttf, woff, woff2).
Font,
// Content shown via an iframe (HTML and SVG per TABLE).
Iframe,
// Raster image, carrying its scaling hint.
Image(ImageRendering),
// Markdown text.
Markdown,
// 3D model (glTF variants per TABLE).
Model,
// PDF document.
Pdf,
// Plain text.
Text,
// Recognized content type with no dedicated rendering (cbor, binary, ...).
Unknown,
// Playable video (mp4, webm).
Video,
}
/// Language tag carried by `Media::Code`, identifying the kind of source
/// code or structured text.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum Language {
Css,
JavaScript,
Json,
Python,
Yaml,
}
// impl Display for Language {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(
// f,
// "{}",
// match self {
// Self::Css => "css",
// Self::JavaScript => "javascript",
// Self::Json => "json",
// Self::Python => "python",
// Self::Yaml => "yaml",
// }
// )
// }
// }
/// Scaling hint carried by `Media::Image`.
/// NOTE(review): variant names match CSS `image-rendering` values
/// ("auto" / "pixelated") — confirm against the renderer that consumes this.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum ImageRendering {
Auto,
Pixelated,
}
// impl Display for ImageRendering {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(
// f,
// "{}",
// match self {
// Self::Auto => "auto",
// Self::Pixelated => "pixelated",
// }
// )
// }
// }
impl Media {
/// Maps each supported MIME content-type string to its `Media` kind and the
/// file extensions associated with it. `FromStr` below performs exact-match
/// lookups on the first field.
#[rustfmt::skip]
const TABLE: &'static [(&'static str, Media, &'static [&'static str])] = &[
("application/cbor", Unknown, &["cbor"]),
("application/json", Code(Json), &["json"]),
("application/octet-stream", Unknown, &["bin"]),
("application/pdf", Pdf, &["pdf"]),
("application/pgp-signature", Text, &["asc"]),
("application/protobuf", Unknown, &["binpb"]),
("application/x-bittorrent", Unknown, &["torrent"]),
("application/x-javascript", Code(JavaScript), &[]),
("application/yaml", Code(Yaml), &["yaml", "yml"]),
("audio/flac", Audio, &["flac"]),
("audio/mpeg", Audio, &["mp3"]),
("audio/ogg;codecs=opus", Audio, &["opus"]),
("audio/wav", Audio, &["wav"]),
("font/otf", Font, &["otf"]),
("font/ttf", Font, &["ttf"]),
("font/woff", Font, &["woff"]),
("font/woff2", Font, &["woff2"]),
("image/apng", Image(Pixelated), &["apng"]),
("image/avif", Image(Auto), &["avif"]),
("image/gif", Image(Pixelated), &["gif"]),
("image/jpeg", Image(Pixelated), &["jpg", "jpeg"]),
("image/jxl", Image(Auto), &[]),
("image/png", Image(Pixelated), &["png"]),
("image/svg+xml", Iframe, &["svg"]),
("image/webp", Image(Pixelated), &["webp"]),
("model/gltf+json", Model, &["gltf"]),
("model/gltf-binary", Model, &["glb"]),
("model/stl", Unknown, &["stl"]),
("text/css", Code(Css), &["css"]),
("text/html", Iframe, &[]),
("text/html;charset=utf-8", Iframe, &["html"]),
("text/javascript", Code(JavaScript), &["js", "mjs"]),
("text/markdown", Markdown, &[]),
("text/markdown;charset=utf-8", Markdown, &["md"]),
("text/plain", Text, &[]),
("text/plain;charset=utf-8", Text, &["txt"]),
("text/x-python", Code(Python), &["py"]),
("video/mp4", Video, &["mp4"]),
("video/webm", Video, &["webm"]),
];
// NOTE(review): the commented-out helpers below destructure TABLE entries as
// 4-tuples `(content_type, mode, _, extensions)`, but the live TABLE above
// holds 3-tuples — they need updating before being revived.
// pub(crate) fn content_type_for_path(
//   path: &Path,
// ) -> Result<(&'static str, BrotliEncoderMode), Error> {
//   let extension = path
//     .extension()
//     .ok_or_else(|| anyhow!("file must have extension"))?
//     .to_str()
//     .ok_or_else(|| anyhow!("unrecognized extension"))?;
//   let extension = extension.to_lowercase();
//   if extension == "mp4" {
//     Media::check_mp4_codec(path)?;
//   }
//   for (content_type, mode, _, extensions) in Self::TABLE {
//     if extensions.contains(&extension.as_str()) {
//       return Ok((*content_type, *mode));
//     }
//   }
//   let mut extensions = Self::TABLE
//     .iter()
//     .flat_map(|(_, _, _, extensions)| extensions.first().cloned())
//     .collect::<Vec<&str>>();
//   extensions.sort();
//   Err(anyhow!(
//     "unsupported file extension `.{extension}`, supported extensions: {}",
//     extensions.join(" "),
//   ))
// }
// pub(crate) fn check_mp4_codec(path: &Path) -> Result<(), Error> {
//   let f = File::open(path)?;
//   let size = f.metadata()?.len();
//   let reader = BufReader::new(f);
//   let mp4 = Mp4Reader::read_header(reader, size)?;
//   for track in mp4.tracks().values() {
//     if let TrackType::Video = track.track_type()? {
//       let media_type = track.media_type()?;
//       if media_type != MediaType::H264 {
//         return Err(anyhow!(
//           "Unsupported video codec, only H.264 is supported in MP4: {media_type}"
//         ));
//       }
//     }
//   }
//   Ok(())
// }
}
impl FromStr for Media {
    type Err = Error;

    /// Looks up a media kind by its exact MIME content-type string,
    /// matching against the first field of `Media::TABLE`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::TABLE
            .iter()
            .find(|entry| entry.0 == s)
            .map(|entry| entry.1)
            .ok_or_else(|| anyhow!("unknown content type: {s}"))
    }
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn for_extension() {
// assert_eq!(
// Media::content_type_for_path(Path::new("pepe.jpg")).unwrap(),
// ("image/jpeg", BrotliEncoderMode::BROTLI_MODE_GENERIC)
// );
// assert_eq!(
// Media::content_type_for_path(Path::new("pepe.jpeg")).unwrap(),
// ("image/jpeg", BrotliEncoderMode::BROTLI_MODE_GENERIC)
// );
// assert_eq!(
// Media::content_type_for_path(Path::new("pepe.JPG")).unwrap(),
// ("image/jpeg", BrotliEncoderMode::BROTLI_MODE_GENERIC)
// );
// assert_eq!(
// Media::content_type_for_path(Path::new("pepe.txt")).unwrap(),
// (
// "text/plain;charset=utf-8",
// BrotliEncoderMode::BROTLI_MODE_TEXT
// )
// );
// assert_regex_match!(
// Media::content_type_for_path(Path::new("pepe.foo")).unwrap_err(),
// r"unsupported file extension `\.foo`, supported extensions: apng .*"
// );
// }
// #[test]
// fn h264_in_mp4_is_allowed() {
// assert!(Media::check_mp4_codec(Path::new("examples/h264.mp4")).is_ok(),);
// }
// #[test]
// fn av1_in_mp4_is_rejected() {
// assert!(Media::check_mp4_codec(Path::new("examples/av1.mp4")).is_err(),);
// }
// #[test]
// fn no_duplicate_extensions() {
// let mut set = HashSet::new();
// for (_, _, _, extensions) in Media::TABLE {
// for extension in *extensions {
// assert!(set.insert(extension), "duplicate extension `{extension}`");
// }
// }
// }
// }

View File

@@ -0,0 +1,231 @@
use std::{fmt::{self, Display, Formatter}, str::FromStr};
use super::{degree::Degree, sat::Sat, *};
/// Rarity tier of a sat, determined by its position relative to block,
/// difficulty-period, halving-epoch, and cycle boundaries (see the
/// `From<Sat>` impl below for the exact rules).
#[derive(
Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd
)]
pub enum Rarity {
// Any sat that is not the first of its block.
Common,
// First sat of a block.
Uncommon,
// First sat of a difficulty-adjustment period.
Rare,
// First sat of a halving epoch.
Epic,
// First sat of a cycle.
Legendary,
// The very first sat (degree 0°0′0″0‴).
Mythic,
}
impl Rarity {
    /// Every rarity tier, in ascending order of rarity. The position of each
    /// variant matches its numeric discriminant (see `From<Rarity> for u8`).
    pub const ALL: [Rarity; 6] = [
        Rarity::Common,
        Rarity::Uncommon,
        Rarity::Rare,
        Rarity::Epic,
        Rarity::Legendary,
        Rarity::Mythic,
    ];

    /// Total number of sats of this rarity that will ever exist.
    pub fn supply(self) -> u64 {
        match self {
            Rarity::Mythic => 1,
            Rarity::Legendary => 5,
            Rarity::Epic => 27,
            Rarity::Rare => 3_432,
            Rarity::Uncommon => 6_926_535,
            Rarity::Common => 2_099_999_990_760_000,
        }
    }
}
impl From<Rarity> for u8 {
    /// Converts a rarity into its numeric discriminant (0 = common ... 5 = mythic).
    fn from(rarity: Rarity) -> Self {
        // No explicit discriminants are declared, so `as` yields declaration order.
        rarity as Self
    }
}
impl TryFrom<u8> for Rarity {
    type Error = u8;

    /// Maps a numeric discriminant back to its rarity; an unmapped byte is
    /// returned unchanged as the error. `ALL` is in declaration order, so
    /// indexing it reproduces the discriminant mapping exactly.
    fn try_from(rarity: u8) -> Result<Self, u8> {
        Self::ALL
            .get(usize::from(rarity))
            .copied()
            .ok_or(rarity)
    }
}
impl Display for Rarity {
    /// Writes the lowercase tier name; the inverse of `FromStr`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let name = match self {
            Self::Common => "common",
            Self::Uncommon => "uncommon",
            Self::Rare => "rare",
            Self::Epic => "epic",
            Self::Legendary => "legendary",
            Self::Mythic => "mythic",
        };
        f.write_str(name)
    }
}
impl From<Sat> for Rarity {
    /// Derives a sat's rarity from its degree notation. Arms are checked
    /// top-down, rarest first, so a sat lands in the rarest tier it qualifies for.
    fn from(sat: Sat) -> Self {
        let Degree {
            hour,
            minute,
            second,
            third,
        } = sat.degree();
        match (hour, minute, second, third) {
            (0, 0, 0, 0) => Self::Mythic,
            (_, 0, 0, 0) => Self::Legendary,
            (_, 0, _, 0) => Self::Epic,
            (_, _, 0, 0) => Self::Rare,
            (_, _, _, 0) => Self::Uncommon,
            _ => Self::Common,
        }
    }
}
impl FromStr for Rarity {
    type Err = String;

    /// Parses a lowercase tier name; the inverse of `Display`. Any other
    /// string is rejected with an "invalid rarity" message.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::ALL
            .into_iter()
            .find(|rarity| rarity.to_string() == s)
            .ok_or_else(|| format!("invalid rarity `{s}`"))
    }
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn rarity() {
// assert_eq!(Sat(0).rarity(), Rarity::Mythic);
// assert_eq!(Sat(1).rarity(), Rarity::Common);
// assert_eq!(Sat(50 * COIN_VALUE - 1).rarity(), Rarity::Common);
// assert_eq!(Sat(50 * COIN_VALUE).rarity(), Rarity::Uncommon);
// assert_eq!(Sat(50 * COIN_VALUE + 1).rarity(), Rarity::Common);
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL) - 1).rarity(),
// Rarity::Common
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL)).rarity(),
// Rarity::Rare
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL) + 1).rarity(),
// Rarity::Common
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL) - 1).rarity(),
// Rarity::Common
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL)).rarity(),
// Rarity::Epic
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL) + 1).rarity(),
// Rarity::Common
// );
// assert_eq!(Sat(2067187500000000 - 1).rarity(), Rarity::Common);
// assert_eq!(Sat(2067187500000000).rarity(), Rarity::Legendary);
// assert_eq!(Sat(2067187500000000 + 1).rarity(), Rarity::Common);
// }
// #[test]
// fn from_str_and_deserialize_ok() {
// #[track_caller]
// fn case(s: &str, expected: Rarity) {
// let actual = s.parse::<Rarity>().unwrap();
// assert_eq!(actual, expected);
// let round_trip = actual.to_string().parse::<Rarity>().unwrap();
// assert_eq!(round_trip, expected);
// let serialized = serde_json::to_string(&expected).unwrap();
// assert!(serde_json::from_str::<Rarity>(&serialized).is_ok());
// }
// case("common", Rarity::Common);
// case("uncommon", Rarity::Uncommon);
// case("rare", Rarity::Rare);
// case("epic", Rarity::Epic);
// case("legendary", Rarity::Legendary);
// case("mythic", Rarity::Mythic);
// }
// #[test]
// fn conversions_with_u8() {
// for expected in Rarity::ALL {
// let n: u8 = expected.into();
// let actual = Rarity::try_from(n).unwrap();
// assert_eq!(actual, expected);
// }
// assert_eq!(Rarity::try_from(6), Err(6));
// }
// #[test]
// fn error() {
// assert_eq!("foo".parse::<Rarity>().unwrap_err(), "invalid rarity `foo`");
// }
// #[test]
// fn supply() {
// let mut i = 0;
// let mut supply = HashMap::<Rarity, u64>::new();
// for height in 0.. {
// let subsidy = Height(height).subsidy();
// if subsidy == 0 {
// break;
// }
// *supply.entry(Sat(i).rarity()).or_default() += 1;
// *supply.entry(Rarity::Common).or_default() += subsidy.saturating_sub(1);
// i += subsidy;
// }
// for (rarity, supply) in &supply {
// assert_eq!(
// rarity.supply(),
// *supply,
// "invalid supply for rarity {rarity}"
// );
// }
// assert_eq!(supply.values().sum::<u64>(), Sat::SUPPLY);
// assert_eq!(supply.len(), Rarity::ALL.len());
// }
// }

841
components/ord/src/sat.rs Normal file
View File

@@ -0,0 +1,841 @@
use std::ops::Add;
use super::{
charm::Charm, decimal_sat::DecimalSat, degree::Degree, epoch::Epoch, height::Height,
rarity::Rarity, *,
};
/// A single satoshi, identified by its zero-based ordinal number — the count
/// of satoshis mined before it. Serializes transparently as that number.
#[derive(Copy, Clone, Eq, PartialEq, Debug, Ord, PartialOrd, Deserialize, Serialize)]
#[serde(transparent)]
pub struct Sat(pub u64);
impl Sat {
/// The last sat that will ever be mined.
pub const LAST: Self = Self(Self::SUPPLY - 1);
/// Total number of satoshis that will ever exist.
pub const SUPPLY: u64 = 2099999997690000;
/// Raw ordinal number of this sat.
pub fn n(self) -> u64 {
self.0
}
/// `Degree` representation of this sat (cycle/epoch/period/block offsets).
pub fn degree(self) -> Degree {
self.into()
}
/// Height of the block in which this sat was mined: the epoch's starting
/// height plus the number of whole subsidies preceding it within the epoch.
pub fn height(self) -> Height {
self.epoch().starting_height()
+ u32::try_from(self.epoch_position() / self.epoch().subsidy()).unwrap()
}
/// Index of the cycle (a run of `CYCLE_EPOCHS` halving epochs) containing this sat.
pub fn cycle(self) -> u32 {
Epoch::from(self).0 / CYCLE_EPOCHS
}
/// True for sats of the ninth 50-BTC block reward
/// (ordinals in [9 * 50 * COIN_VALUE, 10 * 50 * COIN_VALUE)).
pub fn nineball(self) -> bool {
self.n() >= 50 * COIN_VALUE * 9 && self.n() < 50 * COIN_VALUE * 10
}
/// True if the sat's decimal ordinal reads the same forwards and backwards.
pub fn palindrome(self) -> bool {
let mut n = self.0;
let mut reversed = 0;
while n > 0 {
reversed = reversed * 10 + n % 10;
n /= 10;
}
self.0 == reversed
}
/// Position of this sat as a percentage of `Sat::LAST`, e.g. `"50%"`.
/// Computed in f64, so the string carries float precision.
pub fn percentile(self) -> String {
format!("{}%", (self.0 as f64 / Self::LAST.0 as f64) * 100.0)
}
/// Halving epoch containing this sat.
pub fn epoch(self) -> Epoch {
self.into()
}
/// Index of the difficulty-adjustment period containing this sat's block.
pub fn period(self) -> u32 {
self.height().n() / DIFFCHANGE_INTERVAL
}
/// Offset of this sat within its block's subsidy.
pub fn third(self) -> u64 {
self.epoch_position() % self.epoch().subsidy()
}
/// Offset of this sat from the first sat of its halving epoch.
pub fn epoch_position(self) -> u64 {
self.0 - self.epoch().starting_sat().0
}
/// `DecimalSat` representation of this sat.
pub fn decimal(self) -> DecimalSat {
self.into()
}
/// Rarity tier of this sat (see `Rarity`'s `From<Sat>` impl).
pub fn rarity(self) -> Rarity {
self.into()
}
/// Is this sat common or not? Much faster than `Sat::rarity()`.
pub fn common(self) -> bool {
// The block rewards for epochs 0 through 9 are all multiples
// of 9765625 (the epoch 9 reward), so any sat from epoch 9 or
// earlier that isn't divisible by 9765625 is definitely common.
if self < Epoch(10).starting_sat() && self.0 % Epoch(9).subsidy() != 0 {
return true;
}
// Fall back to the full calculation.
let epoch = self.epoch();
(self.0 - epoch.starting_sat().0) % epoch.subsidy() != 0
}
/// True if the sat's ordinal falls on a whole-bitcoin boundary.
pub fn coin(self) -> bool {
self.n() % COIN_VALUE == 0
}
/// Name of this sat in bijective base-26 (`a`-`z`). Computed from
/// `SUPPLY - n`, so later sats get shorter names.
pub fn name(self) -> String {
let mut x = Self::SUPPLY - self.0;
let mut name = String::new();
while x > 0 {
name.push(
"abcdefghijklmnopqrstuvwxyz"
.chars()
.nth(((x - 1) % 26) as usize)
.unwrap(),
);
x = (x - 1) / 26;
}
name.chars().rev().collect()
}
/// Bitfield of `Charm` flags that apply to this sat, derived from its
/// nineball/palindrome/coin properties and its rarity tier.
pub fn charms(self) -> u16 {
let mut charms = 0;
if self.nineball() {
Charm::Nineball.set(&mut charms);
}
if self.palindrome() {
Charm::Palindrome.set(&mut charms);
}
if self.coin() {
Charm::Coin.set(&mut charms);
}
match self.rarity() {
Rarity::Common => {}
Rarity::Epic => Charm::Epic.set(&mut charms),
Rarity::Legendary => Charm::Legendary.set(&mut charms),
Rarity::Mythic => Charm::Mythic.set(&mut charms),
Rarity::Rare => Charm::Rare.set(&mut charms),
Rarity::Uncommon => Charm::Uncommon.set(&mut charms),
}
charms
}
// fn from_name(s: &str) -> Result<Self, Error> {
//   let mut x = 0;
//   for c in s.chars() {
//     match c {
//       'a'..='z' => {
//         x = x * 26 + c as u64 - 'a' as u64 + 1;
//         if x > Self::SUPPLY {
//           return Err(ErrorKind::NameRange.error(s));
//         }
//       }
//       _ => return Err(ErrorKind::NameCharacter.error(s)),
//     }
//   }
//   Ok(Sat(Self::SUPPLY - x))
// }
// fn from_degree(degree: &str) -> Result<Self, Error> {
//   let (cycle_number, rest) = degree
//     .split_once('°')
//     .ok_or_else(|| ErrorKind::MissingDegree.error(degree))?;
//   let cycle_number = cycle_number
//     .parse::<u32>()
//     .map_err(|source| ErrorKind::ParseInt { source }.error(degree))?;
//   let (epoch_offset, rest) = rest
//     .split_once('′')
//     .ok_or_else(|| ErrorKind::MissingMinute.error(degree))?;
//   let epoch_offset = epoch_offset
//     .parse::<u32>()
//     .map_err(|source| ErrorKind::ParseInt { source }.error(degree))?;
//   if epoch_offset >= SUBSIDY_HALVING_INTERVAL {
//     return Err(ErrorKind::EpochOffset.error(degree));
//   }
//   let (period_offset, rest) = rest
//     .split_once('″')
//     .ok_or_else(|| ErrorKind::MissingSecond.error(degree))?;
//   let period_offset = period_offset
//     .parse::<u32>()
//     .map_err(|source| ErrorKind::ParseInt { source }.error(degree))?;
//   if period_offset >= DIFFCHANGE_INTERVAL {
//     return Err(ErrorKind::PeriodOffset.error(degree));
//   }
//   let cycle_start_epoch = cycle_number * CYCLE_EPOCHS;
//   const HALVING_INCREMENT: u32 = SUBSIDY_HALVING_INTERVAL % DIFFCHANGE_INTERVAL;
//   // For valid degrees the relationship between epoch_offset and period_offset
//   // will increment by 336 every halving.
//   let relationship = period_offset + SUBSIDY_HALVING_INTERVAL * CYCLE_EPOCHS - epoch_offset;
//   if relationship % HALVING_INCREMENT != 0 {
//     return Err(ErrorKind::EpochPeriodMismatch.error(degree));
//   }
//   let epochs_since_cycle_start = relationship % DIFFCHANGE_INTERVAL / HALVING_INCREMENT;
//   let epoch = cycle_start_epoch + epochs_since_cycle_start;
//   let height = Height(epoch * SUBSIDY_HALVING_INTERVAL + epoch_offset);
//   let (block_offset, rest) = match rest.split_once('‴') {
//     Some((block_offset, rest)) => (
//       block_offset
//         .parse::<u64>()
//         .map_err(|source| ErrorKind::ParseInt { source }.error(degree))?,
//       rest,
//     ),
//     None => (0, rest),
//   };
//   if !rest.is_empty() {
//     return Err(ErrorKind::TrailingCharacters.error(degree));
//   }
//   if block_offset >= height.subsidy() {
//     return Err(ErrorKind::BlockOffset.error(degree));
//   }
//   Ok(height.starting_sat() + block_offset)
// }
// fn from_decimal(decimal: &str) -> Result<Self, Error> {
//   let (height, offset) = decimal
//     .split_once('.')
//     .ok_or_else(|| ErrorKind::MissingPeriod.error(decimal))?;
//   let height = Height(
//     height
//       .parse()
//       .map_err(|source| ErrorKind::ParseInt { source }.error(decimal))?,
//   );
//   let offset = offset
//     .parse::<u64>()
//     .map_err(|source| ErrorKind::ParseInt { source }.error(decimal))?;
//   if offset >= height.subsidy() {
//     return Err(ErrorKind::BlockOffset.error(decimal));
//   }
//   Ok(height.starting_sat() + offset)
// }
// fn from_percentile(percentile: &str) -> Result<Self, Error> {
//   if !percentile.ends_with('%') {
//     return Err(ErrorKind::Percentile.error(percentile));
//   }
//   let percentile_string = percentile;
//   let percentile = percentile[..percentile.len() - 1]
//     .parse::<f64>()
//     .map_err(|source| ErrorKind::ParseFloat { source }.error(percentile))?;
//   if percentile < 0.0 {
//     return Err(ErrorKind::Percentile.error(percentile_string));
//   }
//   let last = Sat::LAST.n() as f64;
//   let n = (percentile / 100.0 * last).round();
//   if n > last {
//     return Err(ErrorKind::Percentile.error(percentile_string));
//   }
//   #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
//   Ok(Sat(n as u64))
// }
}
// #[derive(Debug, Error)]
// pub struct Error {
// input: String,
// kind: ErrorKind,
// }
// impl Display for Error {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(f, "failed to parse sat `{}`: {}", self.input, self.kind)
// }
// }
// #[derive(Debug, Error)]
// pub enum ErrorKind {
// IntegerRange,
// NameRange,
// NameCharacter,
// Percentile,
// BlockOffset,
// MissingPeriod,
// TrailingCharacters,
// MissingDegree,
// MissingMinute,
// MissingSecond,
// PeriodOffset,
// EpochOffset,
// EpochPeriodMismatch,
// ParseInt { source: ParseIntError },
// ParseFloat { source: ParseFloatError },
// }
// impl ErrorKind {
// fn error(self, input: &str) -> Error {
// Error {
// input: input.to_string(),
// kind: self,
// }
// }
// }
// impl Display for ErrorKind {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// match self {
// Self::IntegerRange => write!(f, "invalid integer range"),
// Self::NameRange => write!(f, "invalid name range"),
// Self::NameCharacter => write!(f, "invalid character in name"),
// Self::Percentile => write!(f, "invalid percentile"),
// Self::BlockOffset => write!(f, "invalid block offset"),
// Self::MissingPeriod => write!(f, "missing period"),
// Self::TrailingCharacters => write!(f, "trailing character"),
// Self::MissingDegree => write!(f, "missing degree symbol"),
// Self::MissingMinute => write!(f, "missing minute symbol"),
// Self::MissingSecond => write!(f, "missing second symbol"),
// Self::PeriodOffset => write!(f, "invalid period offset"),
// Self::EpochOffset => write!(f, "invalid epoch offset"),
// Self::EpochPeriodMismatch => write!(
// f,
// "relationship between epoch offset and period offset must be multiple of 336"
// ),
// Self::ParseInt { source } => write!(f, "invalid integer: {source}"),
// Self::ParseFloat { source } => write!(f, "invalid float: {source}"),
// }
// }
// }
// impl PartialEq<u64> for Sat {
// fn eq(&self, other: &u64) -> bool {
// self.0 == *other
// }
// }
// impl PartialOrd<u64> for Sat {
// fn partial_cmp(&self, other: &u64) -> Option<cmp::Ordering> {
// self.0.partial_cmp(other)
// }
// }
impl Add<u64> for Sat {
type Output = Self;
fn add(self, other: u64) -> Sat {
Sat(self.0 + other)
}
}
// impl AddAssign<u64> for Sat {
// fn add_assign(&mut self, other: u64) {
// *self = Sat(self.0 + other);
// }
// }
// impl FromStr for Sat {
// type Err = Error;
// fn from_str(s: &str) -> Result<Self, Self::Err> {
// if s.chars().any(|c| c.is_ascii_lowercase()) {
// Self::from_name(s)
// } else if s.contains('°') {
// Self::from_degree(s)
// } else if s.contains('%') {
// Self::from_percentile(s)
// } else if s.contains('.') {
// Self::from_decimal(s)
// } else {
// let sat = Self(
// s.parse()
// .map_err(|source| ErrorKind::ParseInt { source }.error(s))?,
// );
// if sat > Self::LAST {
// Err(ErrorKind::IntegerRange.error(s))
// } else {
// Ok(sat)
// }
// }
// }
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn n() {
// assert_eq!(Sat(1).n(), 1);
// assert_eq!(Sat(100).n(), 100);
// }
// #[test]
// fn height() {
// assert_eq!(Sat(0).height(), 0);
// assert_eq!(Sat(1).height(), 0);
// assert_eq!(Sat(Epoch(0).subsidy()).height(), 1);
// assert_eq!(Sat(Epoch(0).subsidy() * 2).height(), 2);
// assert_eq!(
// Epoch(2).starting_sat().height(),
// SUBSIDY_HALVING_INTERVAL * 2
// );
// assert_eq!(Sat(50 * COIN_VALUE).height(), 1);
// assert_eq!(Sat(2099999997689999).height(), 6929999);
// assert_eq!(Sat(2099999997689998).height(), 6929998);
// }
// #[test]
// fn name() {
// assert_eq!(Sat(0).name(), "nvtdijuwxlp");
// assert_eq!(Sat(1).name(), "nvtdijuwxlo");
// assert_eq!(Sat(26).name(), "nvtdijuwxkp");
// assert_eq!(Sat(27).name(), "nvtdijuwxko");
// assert_eq!(Sat(2099999997689999).name(), "a");
// assert_eq!(Sat(2099999997689999 - 1).name(), "b");
// assert_eq!(Sat(2099999997689999 - 25).name(), "z");
// assert_eq!(Sat(2099999997689999 - 26).name(), "aa");
// }
// #[test]
// fn number() {
// assert_eq!(Sat(2099999997689999).n(), 2099999997689999);
// }
// #[test]
// fn degree() {
// assert_eq!(Sat(0).degree().to_string(), "0°00″0‴");
// assert_eq!(Sat(1).degree().to_string(), "0°00″1‴");
// assert_eq!(
// Sat(50 * COIN_VALUE - 1).degree().to_string(),
// "0°00″4999999999‴"
// );
// assert_eq!(Sat(50 * COIN_VALUE).degree().to_string(), "0°11″0‴");
// assert_eq!(Sat(50 * COIN_VALUE + 1).degree().to_string(), "0°11″1‴");
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL) - 1)
// .degree()
// .to_string(),
// "0°20152015″4999999999‴"
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL))
// .degree()
// .to_string(),
// "0°20160″0‴"
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL) + 1)
// .degree()
// .to_string(),
// "0°20160″1‴"
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL) - 1)
// .degree()
// .to_string(),
// "0°209999335″4999999999‴"
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL))
// .degree()
// .to_string(),
// "0°0336″0‴"
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL) + 1)
// .degree()
// .to_string(),
// "0°0336″1‴"
// );
// assert_eq!(
// Sat(2067187500000000 - 1).degree().to_string(),
// "0°2099992015″156249999‴"
// );
// assert_eq!(Sat(2067187500000000).degree().to_string(), "1°00″0‴");
// assert_eq!(Sat(2067187500000000 + 1).degree().to_string(), "1°00″1‴");
// }
// #[test]
// fn invalid_degree_bugfix() {
// // Break glass in case of emergency:
// // for height in 0..(2 * CYCLE_EPOCHS * Epoch::BLOCKS) {
// // // 1054200000000000
// // let expected = Height(height).starting_sat();
// // // 0°16800″0‴
// // let degree = expected.degree();
// // // 2034637500000000
// // let actual = degree.to_string().parse::<Sat>().unwrap();
// // assert_eq!(
// // actual, expected,
// // "Sat at height {height} did not round-trip from degree {degree} successfully"
// // );
// // }
// assert_eq!(Sat(1054200000000000).degree().to_string(), "0°16800″0‴");
// assert_eq!(parse("0°16800″0‴").unwrap(), 1054200000000000);
// assert_eq!(
// Sat(1914226250000000).degree().to_string(),
// "0°122762794″0‴"
// );
// assert_eq!(parse("0°122762794″0‴").unwrap(), 1914226250000000);
// }
// #[test]
// fn period() {
// assert_eq!(Sat(0).period(), 0);
// assert_eq!(Sat(10080000000000).period(), 1);
// assert_eq!(Sat(2099999997689999).period(), 3437);
// assert_eq!(Sat(10075000000000).period(), 0);
// assert_eq!(Sat(10080000000000 - 1).period(), 0);
// assert_eq!(Sat(10080000000000).period(), 1);
// assert_eq!(Sat(10080000000000 + 1).period(), 1);
// assert_eq!(Sat(10085000000000).period(), 1);
// assert_eq!(Sat(2099999997689999).period(), 3437);
// }
// #[test]
// fn epoch() {
// assert_eq!(Sat(0).epoch(), 0);
// assert_eq!(Sat(1).epoch(), 0);
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL)).epoch(),
// 1
// );
// assert_eq!(Sat(2099999997689999).epoch(), 32);
// }
// #[test]
// fn epoch_position() {
// assert_eq!(Epoch(0).starting_sat().epoch_position(), 0);
// assert_eq!((Epoch(0).starting_sat() + 100).epoch_position(), 100);
// assert_eq!(Epoch(1).starting_sat().epoch_position(), 0);
// assert_eq!(Epoch(2).starting_sat().epoch_position(), 0);
// }
// #[test]
// fn subsidy_position() {
// assert_eq!(Sat(0).third(), 0);
// assert_eq!(Sat(1).third(), 1);
// assert_eq!(
// Sat(Height(0).subsidy() - 1).third(),
// Height(0).subsidy() - 1
// );
// assert_eq!(Sat(Height(0).subsidy()).third(), 0);
// assert_eq!(Sat(Height(0).subsidy() + 1).third(), 1);
// assert_eq!(
// Sat(Epoch(1).starting_sat().n() + Epoch(1).subsidy()).third(),
// 0
// );
// assert_eq!(Sat::LAST.third(), 0);
// }
// #[test]
// fn supply() {
// let mut mined = 0;
// for height in 0.. {
// let subsidy = Height(height).subsidy();
// if subsidy == 0 {
// break;
// }
// mined += subsidy;
// }
// assert_eq!(Sat::SUPPLY, mined);
// }
// #[test]
// fn last() {
// assert_eq!(Sat::LAST, Sat::SUPPLY - 1);
// }
// #[test]
// fn eq() {
// assert_eq!(Sat(0), 0);
// assert_eq!(Sat(1), 1);
// }
// #[test]
// fn partial_ord() {
// assert!(Sat(1) > 0);
// assert!(Sat(0) < 1);
// }
// #[test]
// fn add() {
// assert_eq!(Sat(0) + 1, 1);
// assert_eq!(Sat(1) + 100, 101);
// }
// #[test]
// fn add_assign() {
// let mut sat = Sat(0);
// sat += 1;
// assert_eq!(sat, 1);
// sat += 100;
// assert_eq!(sat, 101);
// }
// fn parse(s: &str) -> Result<Sat, String> {
// s.parse::<Sat>().map_err(|e| e.to_string())
// }
// #[test]
// fn from_str_decimal() {
// assert_eq!(parse("0.0").unwrap(), 0);
// assert_eq!(parse("0.1").unwrap(), 1);
// assert_eq!(parse("1.0").unwrap(), 50 * COIN_VALUE);
// assert_eq!(parse("6929999.0").unwrap(), 2099999997689999);
// assert!(parse("0.5000000000").is_err());
// assert!(parse("6930000.0").is_err());
// }
// #[test]
// fn from_str_degree() {
// assert_eq!(parse("0°00″0‴").unwrap(), 0);
// assert_eq!(parse("0°00″").unwrap(), 0);
// assert_eq!(parse("0°00″1‴").unwrap(), 1);
// assert_eq!(parse("0°20152015″0‴").unwrap(), 10075000000000);
// assert_eq!(parse("0°20160″0‴").unwrap(), 10080000000000);
// assert_eq!(parse("0°20171″0‴").unwrap(), 10085000000000);
// assert_eq!(parse("0°20160″1‴").unwrap(), 10080000000001);
// assert_eq!(parse("0°20171″1‴").unwrap(), 10085000000001);
// assert_eq!(parse("0°209999335″0‴").unwrap(), 1049995000000000);
// assert_eq!(parse("0°0336″0‴").unwrap(), 1050000000000000);
// assert_eq!(parse("0°0672″0‴").unwrap(), 1575000000000000);
// assert_eq!(parse("0°2099991007″0‴").unwrap(), 1837498750000000);
// assert_eq!(parse("0°01008″0‴").unwrap(), 1837500000000000);
// assert_eq!(parse("1°00″0‴").unwrap(), 2067187500000000);
// assert_eq!(parse("2°00″0‴").unwrap(), 2099487304530000);
// assert_eq!(parse("3°00″0‴").unwrap(), 2099991988080000);
// assert_eq!(parse("4°00″0‴").unwrap(), 2099999873370000);
// assert_eq!(parse("5°00″0‴").unwrap(), 2099999996220000);
// assert_eq!(parse("5°0336″0‴").unwrap(), 2099999997060000);
// assert_eq!(parse("5°0672″0‴").unwrap(), 2099999997480000);
// assert_eq!(parse("5°1673″0‴").unwrap(), 2099999997480001);
// assert_eq!(parse("5°2099991007″0‴").unwrap(), 2099999997689999);
// }
// #[test]
// fn from_str_number() {
// assert_eq!(parse("0").unwrap(), 0);
// assert_eq!(parse("2099999997689999").unwrap(), 2099999997689999);
// assert!(parse("2099999997690000").is_err());
// }
// #[test]
// fn from_str_degree_invalid_cycle_number() {
// assert!(parse("5°00″0‴").is_ok());
// assert!(parse("6°00″0‴").is_err());
// }
// #[test]
// fn from_str_degree_invalid_epoch_offset() {
// assert!(parse("0°209999335″0‴").is_ok());
// assert!(parse("0°210000336″0‴").is_err());
// }
// #[test]
// fn from_str_degree_invalid_period_offset() {
// assert!(parse("0°20152015″0‴").is_ok());
// assert!(parse("0°20162016″0‴").is_err());
// }
// #[test]
// fn from_str_degree_invalid_block_offset() {
// assert!(parse("0°00″4999999999‴").is_ok());
// assert!(parse("0°00″5000000000‴").is_err());
// assert!(parse("0°209999335″4999999999‴").is_ok());
// assert!(parse("0°0336″4999999999‴").is_err());
// }
// #[test]
// fn from_str_degree_invalid_period_block_relationship() {
// assert!(parse("0°20152015″0‴").is_ok());
// assert!(parse("0°20160″0‴").is_ok());
// assert!(parse("0°20161″0‴").is_err());
// assert!(parse("0°0336″0‴").is_ok());
// }
// #[test]
// fn from_str_degree_post_distribution() {
// assert!(parse("5°2099991007″0‴").is_ok());
// assert!(parse("5°01008″0‴").is_err());
// }
// #[test]
// fn from_str_name() {
// assert_eq!(parse("nvtdijuwxlp").unwrap(), 0);
// assert_eq!(parse("a").unwrap(), 2099999997689999);
// assert!(parse("(").is_err());
// assert!(parse("").is_err());
// assert!(parse("nvtdijuwxlq").is_err());
// assert!(parse("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa").is_err());
// }
// #[test]
// fn cycle() {
// assert_eq!(
// SUBSIDY_HALVING_INTERVAL * CYCLE_EPOCHS % DIFFCHANGE_INTERVAL,
// 0
// );
// for i in 1..CYCLE_EPOCHS {
// assert_ne!(i * SUBSIDY_HALVING_INTERVAL % DIFFCHANGE_INTERVAL, 0);
// }
// assert_eq!(
// CYCLE_EPOCHS * SUBSIDY_HALVING_INTERVAL % DIFFCHANGE_INTERVAL,
// 0
// );
// assert_eq!(Sat(0).cycle(), 0);
// assert_eq!(Sat(2067187500000000 - 1).cycle(), 0);
// assert_eq!(Sat(2067187500000000).cycle(), 1);
// assert_eq!(Sat(2067187500000000 + 1).cycle(), 1);
// }
// #[test]
// fn third() {
// assert_eq!(Sat(0).third(), 0);
// assert_eq!(Sat(50 * COIN_VALUE - 1).third(), 4999999999);
// assert_eq!(Sat(50 * COIN_VALUE).third(), 0);
// assert_eq!(Sat(50 * COIN_VALUE + 1).third(), 1);
// }
// #[test]
// fn percentile() {
// assert_eq!(Sat(0).percentile(), "0%");
// assert_eq!(Sat(Sat::LAST.n() / 2).percentile(), "49.99999999999998%");
// assert_eq!(Sat::LAST.percentile(), "100%");
// }
// #[test]
// fn from_percentile() {
// "-1%".parse::<Sat>().unwrap_err();
// "101%".parse::<Sat>().unwrap_err();
// }
// #[test]
// fn percentile_round_trip() {
// #[track_caller]
// fn case(n: u64) {
// let expected = Sat(n);
// let actual = expected.percentile().parse::<Sat>().unwrap();
// assert_eq!(expected, actual);
// }
// for n in 0..1024 {
// case(n);
// case(Sat::LAST.n() / 2 + n);
// case(Sat::LAST.n() - n);
// case(Sat::LAST.n() / (n + 1));
// }
// }
// #[test]
// fn common() {
// #[track_caller]
// fn case(n: u64) {
// assert_eq!(Sat(n).common(), Sat(n).rarity() == Rarity::Common);
// }
// case(0);
// case(1);
// case(50 * COIN_VALUE - 1);
// case(50 * COIN_VALUE);
// case(50 * COIN_VALUE + 1);
// case(2067187500000000 - 1);
// case(2067187500000000);
// case(2067187500000000 + 1);
// }
// #[test]
// fn common_fast_path() {
// // Exhaustively test the Sat::common() fast path on every
// // uncommon sat.
// for height in 0..Epoch::FIRST_POST_SUBSIDY.starting_height().0 {
// let height = Height(height);
// assert!(!Sat::common(height.starting_sat()));
// }
// }
// #[test]
// fn coin() {
// assert!(Sat(0).coin());
// assert!(!Sat(COIN_VALUE - 1).coin());
// assert!(Sat(COIN_VALUE).coin());
// assert!(!Sat(COIN_VALUE + 1).coin());
// }
// #[test]
// fn nineball() {
// for height in 0..10 {
// let sat = Sat(height * 50 * COIN_VALUE);
// assert_eq!(
// sat.nineball(),
// sat.height() == 9,
// "nineball: {} height: {}",
// sat.nineball(),
// sat.height()
// );
// }
// }
// #[test]
// fn error_display() {
// assert_eq!(
// Error {
// input: "foo".into(),
// kind: ErrorKind::Percentile
// }
// .to_string(),
// "failed to parse sat `foo`: invalid percentile",
// );
// }
// #[test]
// fn palindrome() {
// assert!(Sat(0).palindrome());
// assert!(!Sat(10).palindrome());
// assert!(Sat(11).palindrome());
// }
// #[test]
// fn palindrome_charm() {
// assert!(Charm::Palindrome.is_set(Sat(0).charms()));
// assert!(!Charm::Palindrome.is_set(Sat(10).charms()));
// assert!(Charm::Palindrome.is_set(Sat(11).charms()));
// }
// }

View File

@@ -0,0 +1,149 @@
use bitcoin::OutPoint;
/// A satpoint identifies the location of a sat in an output.
///
/// The string representation of a satpoint consists of that of an outpoint,
/// which identifies an output, followed by `:OFFSET`. For example, the string
/// representation of the first sat of the genesis block coinbase output is
/// `000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f:0:0`,
/// that of the second sat of the genesis block coinbase output is
/// `000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f:0:1`,
/// and so on.
#[derive(Debug, PartialEq, Copy, Clone, Eq, PartialOrd, Ord, Default, Hash)]
pub struct SatPoint {
    /// The transaction output that contains the sat.
    pub outpoint: OutPoint,
    /// Zero-based offset of the sat within that output.
    pub offset: u64,
}
// impl Display for SatPoint {
// fn fmt(&self, f: &mut Formatter) -> fmt::Result {
// write!(f, "{}:{}", self.outpoint, self.offset)
// }
// }
// impl Encodable for SatPoint {
// fn consensus_encode<S: bitcoin::io::Write + ?Sized>(
// &self,
// s: &mut S,
// ) -> Result<usize, bitcoin::io::Error> {
// let len = self.outpoint.consensus_encode(s)?;
// Ok(len + self.offset.consensus_encode(s)?)
// }
// }
// impl Decodable for SatPoint {
// fn consensus_decode<D: bitcoin::io::Read + ?Sized>(
// d: &mut D,
// ) -> Result<Self, bitcoin::consensus::encode::Error> {
// Ok(SatPoint {
// outpoint: Decodable::consensus_decode(d)?,
// offset: Decodable::consensus_decode(d)?,
// })
// }
// }
// impl FromStr for SatPoint {
// type Err = Error;
// fn from_str(s: &str) -> Result<Self, Self::Err> {
// let (outpoint, offset) = s.rsplit_once(':').ok_or_else(|| Error::Colon(s.into()))?;
// Ok(SatPoint {
// outpoint: outpoint
// .parse::<OutPoint>()
// .map_err(|err| Error::Outpoint {
// outpoint: outpoint.into(),
// err,
// })?,
// offset: offset.parse::<u64>().map_err(|err| Error::Offset {
// offset: offset.into(),
// err,
// })?,
// })
// }
// }
// #[derive(Debug, Error)]
// pub enum Error {
// #[error("satpoint `{0}` missing colon")]
// Colon(String),
// #[error("satpoint offset `{offset}` invalid: {err}")]
// Offset { offset: String, err: ParseIntError },
// #[error("satpoint outpoint `{outpoint}` invalid: {err}")]
// Outpoint {
// outpoint: String,
// err: ParseOutPointError,
// },
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn error() {
// assert_eq!(
// "foo".parse::<SatPoint>().unwrap_err().to_string(),
// "satpoint `foo` missing colon"
// );
// assert_eq!(
// "foo:bar".parse::<SatPoint>().unwrap_err().to_string(),
// "satpoint outpoint `foo` invalid: OutPoint not in <txid>:<vout> format"
// );
// assert_eq!(
// "1111111111111111111111111111111111111111111111111111111111111111:1:bar"
// .parse::<SatPoint>()
// .unwrap_err()
// .to_string(),
// "satpoint offset `bar` invalid: invalid digit found in string"
// );
// }
// #[test]
// fn from_str_ok() {
// assert_eq!(
// "1111111111111111111111111111111111111111111111111111111111111111:1:1"
// .parse::<SatPoint>()
// .unwrap(),
// SatPoint {
// outpoint: "1111111111111111111111111111111111111111111111111111111111111111:1"
// .parse()
// .unwrap(),
// offset: 1,
// }
// );
// }
// #[test]
// fn from_str_err() {
// "abc".parse::<SatPoint>().unwrap_err();
// "abc:xyz".parse::<SatPoint>().unwrap_err();
// "1111111111111111111111111111111111111111111111111111111111111111:1"
// .parse::<SatPoint>()
// .unwrap_err();
// "1111111111111111111111111111111111111111111111111111111111111111:1:foo"
// .parse::<SatPoint>()
// .unwrap_err();
// }
// #[test]
// fn deserialize_ok() {
// assert_eq!(
// serde_json::from_str::<SatPoint>(
// "\"1111111111111111111111111111111111111111111111111111111111111111:1:1\""
// )
// .unwrap(),
// SatPoint {
// outpoint: "1111111111111111111111111111111111111111111111111111111111111111:1"
// .parse()
// .unwrap(),
// offset: 1,
// }
// );
// }
// }

102
components/ord/src/tag.rs Normal file
View File

@@ -0,0 +1,102 @@
use std::{collections::BTreeMap, mem};
use bitcoin::{constants::MAX_SCRIPT_ELEMENT_SIZE, script};
/// Single-byte field tags used inside an inscription envelope script.
///
/// Each tag byte precedes its value in the envelope (see `Tag::bytes` and
/// `Tag::append`).
// NOTE(review): the numeric values appear to follow the ord protocol's
// tag-numbering convention (even tags consensus-relevant, odd informational) —
// confirm against the ord envelope specification.
#[derive(Copy, Clone)]
#[repr(u8)]
pub(crate) enum Tag {
    Pointer = 2,
    #[allow(unused)]
    Unbound = 66,
    ContentType = 1,
    Parent = 3,
    // The only chunked tag: its value may span multiple data pushes
    // (see `Tag::chunked`).
    Metadata = 5,
    Metaprotocol = 7,
    ContentEncoding = 9,
    Delegate = 11,
    Rune = 13,
    #[allow(unused)]
    Note = 15,
    #[allow(unused)]
    Nop = 255,
}
impl Tag {
    /// Whether this tag's value is split across multiple tagged data pushes
    /// that must be concatenated when read back (`Tag::take`).
    /// Only `Metadata` is chunked.
    fn chunked(self) -> bool {
        matches!(self, Self::Metadata)
    }

    /// The single-byte wire representation of this tag.
    pub(crate) fn bytes(self) -> [u8; 1] {
        [self as u8]
    }

    /// Appends `tag value` push pairs to `builder` when `value` is present;
    /// no-op for `None`.
    ///
    /// Chunked tags split the value into `MAX_SCRIPT_ELEMENT_SIZE` pieces and
    /// emit a tag byte before every chunk; other tags emit one tag/value pair.
    pub(crate) fn append(self, builder: &mut script::Builder, value: &Option<Vec<u8>>) {
        let Some(value) = value else {
            return;
        };
        // script::Builder's push methods consume self, so move the builder
        // out, extend it, and write it back.
        let mut tmp = mem::replace(builder, script::Builder::new());
        if self.chunked() {
            for chunk in value.chunks(MAX_SCRIPT_ELEMENT_SIZE) {
                tmp = tmp
                    .push_slice::<&script::PushBytes>(self.bytes().as_slice().try_into().unwrap())
                    .push_slice::<&script::PushBytes>(chunk.try_into().unwrap());
            }
        } else {
            tmp = tmp
                .push_slice::<&script::PushBytes>(self.bytes().as_slice().try_into().unwrap())
                .push_slice::<&script::PushBytes>(value.as_slice().try_into().unwrap());
        }
        *builder = tmp;
    }

    /// Appends one `tag value` push pair to `builder` for every entry in
    /// `values`, in order.
    ///
    /// Takes a slice instead of `&Vec<Vec<u8>>` (clippy `ptr_arg`); existing
    /// `&vec` call sites still coerce, so this is backward compatible.
    pub(crate) fn append_array(self, builder: &mut script::Builder, values: &[Vec<u8>]) {
        let mut tmp = mem::replace(builder, script::Builder::new());
        for value in values {
            tmp = tmp
                .push_slice::<&script::PushBytes>(self.bytes().as_slice().try_into().unwrap())
                .push_slice::<&script::PushBytes>(value.as_slice().try_into().unwrap());
        }
        *builder = tmp;
    }

    /// Removes and returns this tag's value from parsed envelope `fields`.
    ///
    /// Chunked tags: removes the whole entry and concatenates all chunks,
    /// returning `None` if there were no chunks. Other tags: removes and
    /// returns only the first value, deleting the map entry once it is empty,
    /// so repeated calls yield successive duplicate values.
    pub(crate) fn take(self, fields: &mut BTreeMap<&[u8], Vec<&[u8]>>) -> Option<Vec<u8>> {
        if self.chunked() {
            let chunks = fields.remove(self.bytes().as_slice())?;
            if chunks.is_empty() {
                None
            } else {
                // `copied` (not `cloned`): the items are `&u8`, which is Copy.
                Some(chunks.into_iter().flatten().copied().collect())
            }
        } else {
            let values = fields.get_mut(self.bytes().as_slice())?;
            if values.is_empty() {
                None
            } else {
                let value = values.remove(0).to_vec();
                if values.is_empty() {
                    fields.remove(self.bytes().as_slice());
                }
                Some(value)
            }
        }
    }

    /// Removes and returns every value for this tag as owned byte vectors;
    /// empty when the tag is absent.
    pub(crate) fn take_array(self, fields: &mut BTreeMap<&[u8], Vec<&[u8]>>) -> Vec<Vec<u8>> {
        fields
            .remove(self.bytes().as_slice())
            .unwrap_or_default()
            .into_iter()
            .map(<[u8]>::to_vec)
            .collect()
    }
}

View File

@@ -10,6 +10,9 @@ path = "src/main.rs"
[dependencies]
ordhook = { path = "../ordhook-core" }
chainhook-types = { path = "../chainhook-types-rs" }
chainhook-sdk = { path = "../chainhook-sdk" }
hex = "0.4.3"
num_cpus = "1.16.0"
serde = "1"
serde_json = "1"

View File

@@ -1,16 +1,8 @@
use crate::config::file::ConfigFile;
use crate::config::generator::generate_config;
use chainhook_sdk::utils::{BlockHeights, Context};
use clap::{Parser, Subcommand};
use hiro_system_kit;
use ordhook::chainhook_sdk::chainhooks::types::{
BitcoinChainhookSpecification, HttpHook, InscriptionFeedData, OrdinalsMetaProtocol,
};
use ordhook::chainhook_sdk::chainhooks::types::{
BitcoinPredicateType, HookAction, OrdinalOperations,
};
use ordhook::chainhook_sdk::utils::BlockHeights;
use ordhook::chainhook_sdk::utils::Context;
use ordhook::config::Config;
use ordhook::core::first_inscription_height;
use ordhook::core::pipeline::bitcoind_download_blocks;
use ordhook::core::pipeline::processors::block_archiving::start_block_archiving_processor;
@@ -22,7 +14,6 @@ use ordhook::db::cursor::BlockBytesCursor;
use ordhook::db::{migrate_dbs, reset_dbs};
use ordhook::service::Service;
use ordhook::try_info;
use std::collections::HashSet;
use std::path::PathBuf;
use std::thread::sleep;
use std::time::Duration;
@@ -383,7 +374,7 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
info!(ctx.expect_logger(), "--------------------");
info!(ctx.expect_logger(), "Block: {i}");
for tx in block.iter_tx() {
info!(ctx.expect_logger(), "Tx: {}", ordhook::hex::encode(tx.txid));
info!(ctx.expect_logger(), "Tx: {}", hex::encode(tx.txid));
}
}
}
@@ -440,52 +431,3 @@ async fn handle_command(opts: Opts, ctx: &Context) -> Result<(), String> {
}
Ok(())
}
pub fn build_predicate_from_cli(
config: &Config,
post_to: &str,
block_heights: Option<&BlockHeights>,
start_block: Option<u64>,
auth_token: Option<String>,
is_streaming: bool,
) -> Result<BitcoinChainhookSpecification, String> {
// Retrieve last block height known, and display it
let (start_block, end_block, blocks) = match (start_block, block_heights) {
(None, Some(BlockHeights::BlockRange(start, end))) => (Some(*start), Some(*end), None),
(None, Some(BlockHeights::Blocks(blocks))) => (None, None, Some(blocks.clone())),
(Some(start), None) => (Some(start), None, None),
_ => unreachable!(),
};
let mut meta_protocols: Option<HashSet<OrdinalsMetaProtocol>> = None;
if config.meta_protocols.brc20 {
let mut meta = HashSet::<OrdinalsMetaProtocol>::new();
meta.insert(OrdinalsMetaProtocol::All);
meta_protocols = Some(meta.clone());
}
let predicate = BitcoinChainhookSpecification {
network: config.network.bitcoin_network.clone(),
uuid: post_to.to_string(),
owner_uuid: None,
name: post_to.to_string(),
version: 1,
start_block,
end_block,
blocks,
expire_after_occurrence: None,
include_proof: false,
include_inputs: false,
include_outputs: false,
include_witness: false,
expired_at: None,
enabled: is_streaming,
predicate: BitcoinPredicateType::OrdinalsProtocol(OrdinalOperations::InscriptionFeed(
InscriptionFeedData { meta_protocols },
)),
action: HookAction::HttpPost(HttpHook {
url: post_to.to_string(),
authorization_header: format!("Bearer {}", auth_token.unwrap_or("".to_string())),
}),
};
Ok(predicate)
}

View File

@@ -1,7 +1,4 @@
use ordhook::chainhook_sdk::observer::DEFAULT_INGESTION_PORT;
use ordhook::chainhook_sdk::types::{
BitcoinBlockSignaling, BitcoinNetwork, StacksNetwork, StacksNodeConfig,
};
use chainhook_types::{BitcoinBlockSignaling, BitcoinNetwork};
use ordhook::config::{
Config, IndexerConfig, LogConfig, MetaProtocolsConfig, ResourcesConfig, SnapshotConfig,
SnapshotConfigDownloadUrls, StorageConfig, DEFAULT_BITCOIND_RPC_THREADS,
@@ -44,11 +41,11 @@ impl ConfigFile {
}
pub fn from_config_file(config_file: ConfigFile) -> Result<Config, String> {
let (_, bitcoin_network) = match config_file.network.mode.as_str() {
"devnet" => (StacksNetwork::Devnet, BitcoinNetwork::Regtest),
"testnet" => (StacksNetwork::Testnet, BitcoinNetwork::Testnet),
"mainnet" => (StacksNetwork::Mainnet, BitcoinNetwork::Mainnet),
"signet" => (StacksNetwork::Testnet, BitcoinNetwork::Signet),
let bitcoin_network = match config_file.network.mode.as_str() {
"devnet" => BitcoinNetwork::Regtest,
"testnet" => BitcoinNetwork::Testnet,
"mainnet" => BitcoinNetwork::Mainnet,
"signet" => BitcoinNetwork::Signet,
_ => return Err("network.mode not supported".to_string()),
};
@@ -126,9 +123,7 @@ impl ConfigFile {
bitcoind_rpc_password: config_file.network.bitcoind_rpc_password.to_string(),
bitcoin_block_signaling: match config_file.network.bitcoind_zmq_url {
Some(ref zmq_url) => BitcoinBlockSignaling::ZeroMQ(zmq_url.clone()),
None => BitcoinBlockSignaling::Stacks(StacksNodeConfig::default_localhost(
DEFAULT_INGESTION_PORT,
)),
None => BitcoinBlockSignaling::ZeroMQ("".to_string()),
},
bitcoin_network,
prometheus_monitoring_port: config_file.network.prometheus_monitoring_port,

View File

@@ -1,4 +1,4 @@
use ordhook::chainhook_sdk::types::BitcoinNetwork;
use chainhook_types::BitcoinNetwork;
pub fn generate_config(network: &BitcoinNetwork) -> String {
let network = format!("{:?}", network);

View File

@@ -9,10 +9,11 @@ serde = "1"
serde_json = "1"
serde_derive = "1"
hex = "0.4.3"
rand = "0.8.5"
lru = "0.12.3"
chainhook-sdk = { version = "=0.12.10", features = ["zeromq"] }
# chainhook-sdk = { version = "=0.12.10", path = "../../../chainhook/components/chainhook-sdk", features = ["zeromq"] }
rand = "0.9.0"
lru = "0.13.0"
bitcoin = { workspace = true }
chainhook-sdk = { path = "../chainhook-sdk" }
chainhook-types = { path = "../chainhook-types-rs" }
hiro-system-kit = "0.3.1"
reqwest = { version = "0.11", default-features = false, features = [
"stream",
@@ -27,12 +28,10 @@ flume = "0.11.0"
ansi_term = "0.12.1"
atty = "0.2.14"
crossbeam-channel = "0.5.8"
uuid = { version = "1.3.0", features = ["v4", "fast-rng"] }
threadpool = "1.8.1"
dashmap = "5.4.0"
fxhash = "0.2.1"
anyhow = { version = "1.0.56", features = ["backtrace"] }
schemars = { version = "0.8.16", git = "https://github.com/hirosystems/schemars.git", branch = "feat-chainhook-fixes" }
progressing = '3'
futures = "0.3.28"
rocksdb = { version = "0.21.0", default-features = false, features = [
@@ -41,12 +40,14 @@ rocksdb = { version = "0.21.0", default-features = false, features = [
pprof = { version = "0.14.0", features = ["flamegraph"], optional = true }
hyper = { version = "=0.14.27" }
lazy_static = { version = "1.4.0" }
ciborium = "0.2.1"
regex = "1.10.3"
prometheus = "0.13.3"
chainhook-postgres = { path = "../chainhook-postgres" }
refinery = { version = "0.8", features = ["tokio-postgres"] }
tokio-postgres = { workspace = true }
deadpool-postgres = { workspace = true }
refinery = { workspace = true }
maplit = "1.0.2"
ord = { path = "../ord" }
[dev-dependencies]
test-case = "3.1.0"

View File

@@ -1,8 +1,6 @@
pub use chainhook_postgres::PgConnectionConfig;
use chainhook_sdk::observer::EventObserverConfig;
use chainhook_sdk::types::{
BitcoinBlockSignaling, BitcoinNetwork, StacksNetwork, StacksNodeConfig,
};
use chainhook_types::{BitcoinBlockSignaling, BitcoinNetwork};
use std::path::PathBuf;
const DEFAULT_MAINNET_ORDINALS_SQLITE_ARCHIVE: &str =
@@ -101,19 +99,11 @@ impl ResourcesConfig {
impl Config {
pub fn get_event_observer_config(&self) -> EventObserverConfig {
EventObserverConfig {
bitcoin_rpc_proxy_enabled: true,
chainhook_config: None,
ingestion_port: DEFAULT_INGESTION_PORT,
bitcoind_rpc_username: self.network.bitcoind_rpc_username.clone(),
bitcoind_rpc_password: self.network.bitcoind_rpc_password.clone(),
bitcoind_rpc_url: self.network.bitcoind_rpc_url.clone(),
bitcoin_block_signaling: self.network.bitcoin_block_signaling.clone(),
display_logs: false,
cache_path: self.storage.working_dir.clone(),
bitcoin_network: self.network.bitcoin_network.clone(),
stacks_network: StacksNetwork::Devnet,
prometheus_monitoring_port: None,
data_handler_tx: None,
}
}
@@ -166,8 +156,8 @@ impl Config {
bitcoind_rpc_url: "http://0.0.0.0:18443".into(),
bitcoind_rpc_username: "devnet".into(),
bitcoind_rpc_password: "devnet".into(),
bitcoin_block_signaling: BitcoinBlockSignaling::Stacks(
StacksNodeConfig::default_localhost(DEFAULT_INGESTION_PORT),
bitcoin_block_signaling: BitcoinBlockSignaling::ZeroMQ(
"http://0.0.0.0:18543".into(),
),
bitcoin_network: BitcoinNetwork::Regtest,
prometheus_monitoring_port: None,
@@ -210,8 +200,8 @@ impl Config {
bitcoind_rpc_url: "http://0.0.0.0:18332".into(),
bitcoind_rpc_username: "devnet".into(),
bitcoind_rpc_password: "devnet".into(),
bitcoin_block_signaling: BitcoinBlockSignaling::Stacks(
StacksNodeConfig::default_localhost(DEFAULT_INGESTION_PORT),
bitcoin_block_signaling: BitcoinBlockSignaling::ZeroMQ(
"http://0.0.0.0:18543".into(),
),
bitcoin_network: BitcoinNetwork::Testnet,
prometheus_monitoring_port: Some(9153),
@@ -257,8 +247,8 @@ impl Config {
bitcoind_rpc_url: "http://0.0.0.0:8332".into(),
bitcoind_rpc_username: "devnet".into(),
bitcoind_rpc_password: "devnet".into(),
bitcoin_block_signaling: BitcoinBlockSignaling::Stacks(
StacksNodeConfig::default_localhost(DEFAULT_INGESTION_PORT),
bitcoin_block_signaling: BitcoinBlockSignaling::ZeroMQ(
"http://0.0.0.0:18543".into(),
),
bitcoin_network: BitcoinNetwork::Mainnet,
prometheus_monitoring_port: Some(9153),

View File

@@ -1,15 +1,15 @@
use std::collections::HashMap;
use chainhook_postgres::{
deadpool_postgres::GenericClient,
tokio_postgres::{types::ToSql, Client},
types::{PgNumericU128, PgNumericU64},
utils, FromPgRow, BATCH_QUERY_CHUNK_SIZE,
};
use chainhook_sdk::types::{
use chainhook_types::{
BitcoinBlockData, Brc20BalanceData, Brc20Operation, Brc20TokenDeployData, Brc20TransferData,
};
use refinery::embed_migrations;
use deadpool_postgres::GenericClient;
use tokio_postgres::{types::ToSql, Client};
use super::{
models::{DbOperation, DbToken},
@@ -574,12 +574,12 @@ pub async fn rollback_block_operations<T: GenericClient>(
#[cfg(test)]
mod test {
use deadpool_postgres::GenericClient;
use chainhook_postgres::{
deadpool_postgres::GenericClient,
pg_begin, pg_pool_client,
types::{PgBigIntU32, PgNumericU128, PgNumericU64, PgSmallIntU8},
};
use chainhook_sdk::types::{
use chainhook_types::{
BlockIdentifier, OrdinalInscriptionTransferDestination, TransactionIdentifier,
};

View File

@@ -3,14 +3,12 @@ use std::{
num::NonZeroUsize,
};
use chainhook_postgres::{
deadpool_postgres::GenericClient,
types::{PgBigIntU32, PgNumericU128, PgNumericU64, PgSmallIntU8},
};
use chainhook_sdk::types::{
use chainhook_postgres::types::{PgBigIntU32, PgNumericU128, PgNumericU64, PgSmallIntU8};
use chainhook_types::{
BlockIdentifier, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
TransactionIdentifier,
};
use deadpool_postgres::GenericClient;
use lru::LruCache;
use maplit::hashmap;
@@ -491,7 +489,7 @@ impl Brc20MemoryCache {
#[cfg(test)]
mod test {
use chainhook_postgres::{pg_begin, pg_pool_client};
use chainhook_sdk::types::{BitcoinNetwork, BlockIdentifier, TransactionIdentifier};
use chainhook_types::{BitcoinNetwork, BlockIdentifier, TransactionIdentifier};
use test_case::test_case;
use crate::{

View File

@@ -1,13 +1,11 @@
use std::collections::HashMap;
use chainhook_postgres::deadpool_postgres::Transaction;
use chainhook_sdk::{
types::{
BitcoinBlockData, BlockIdentifier, Brc20BalanceData, Brc20Operation, Brc20TokenDeployData,
Brc20TransferData, OrdinalInscriptionTransferData, OrdinalOperation, TransactionIdentifier,
},
utils::Context,
use chainhook_sdk::utils::Context;
use chainhook_types::{
BitcoinBlockData, BlockIdentifier, Brc20BalanceData, Brc20Operation, Brc20TokenDeployData,
Brc20TransferData, OrdinalInscriptionTransferData, OrdinalOperation, TransactionIdentifier,
};
use deadpool_postgres::Transaction;
use crate::{core::meta_protocols::brc20::u128_amount_to_decimals_str, try_info};
@@ -266,7 +264,7 @@ mod test {
use std::collections::HashMap;
use chainhook_postgres::{pg_begin, pg_pool_client};
use chainhook_sdk::types::{
use chainhook_types::{
Brc20BalanceData, Brc20Operation, Brc20TokenDeployData, Brc20TransferData,
OrdinalInscriptionTransferDestination, OrdinalOperation,
};

View File

@@ -1,4 +1,4 @@
use chainhook_sdk::types::BitcoinNetwork;
use chainhook_types::BitcoinNetwork;
pub mod brc20_pg;
pub mod cache;

View File

@@ -1,8 +1,8 @@
use chainhook_postgres::{
tokio_postgres::Row,
types::{PgBigIntU32, PgNumericU128, PgNumericU64},
FromPgRow,
};
use tokio_postgres::Row;
#[derive(Debug, Clone)]
pub struct DbOperation {

View File

@@ -1,8 +1,8 @@
use chainhook_postgres::{
tokio_postgres::Row,
types::{PgBigIntU32, PgNumericU128, PgNumericU64, PgSmallIntU8},
FromPgRow,
};
use tokio_postgres::Row;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DbToken {

View File

@@ -1,5 +1,5 @@
use crate::ord::inscription::Inscription;
use crate::ord::media::{Language, Media};
use ord::inscription::Inscription;
use ord::media::{Language, Media};
#[derive(PartialEq, Debug, Clone)]
pub struct ParsedBrc20TokenDeployData {
@@ -59,7 +59,8 @@ pub fn amt_has_valid_decimals(amt: &str, max_decimals: u8) -> bool {
}
fn parse_float_numeric_value(n: &str, max_decimals: u8) -> Option<f64> {
if n.chars().all(|c| c.is_ascii_digit() || c == '.') && !n.starts_with('.') && !n.ends_with('.') {
if n.chars().all(|c| c.is_ascii_digit() || c == '.') && !n.starts_with('.') && !n.ends_with('.')
{
if !amt_has_valid_decimals(n, max_decimals) {
return None;
}
@@ -147,14 +148,16 @@ pub fn parse_brc20_operation(
} else {
limit = max.clone();
}
return Ok(Some(ParsedBrc20Operation::Deploy(ParsedBrc20TokenDeployData {
tick: json.tick.to_lowercase(),
display_tick: json.tick.clone(),
max,
lim: limit,
dec: decimals.to_string(),
self_mint,
})));
return Ok(Some(ParsedBrc20Operation::Deploy(
ParsedBrc20TokenDeployData {
tick: json.tick.to_lowercase(),
display_tick: json.tick.clone(),
max,
lim: limit,
dec: decimals.to_string(),
self_mint,
},
)));
}
Err(_) => match serde_json::from_slice::<Brc20MintOrTransferJson>(inscription_body) {
Ok(json) => {
@@ -201,10 +204,10 @@ pub fn parse_brc20_operation(
#[cfg(test)]
mod test {
use super::{parse_brc20_operation, ParsedBrc20Operation};
use crate::{
core::meta_protocols::brc20::parser::{ParsedBrc20BalanceData, ParsedBrc20TokenDeployData},
ord::inscription::Inscription,
use crate::core::meta_protocols::brc20::parser::{
ParsedBrc20BalanceData, ParsedBrc20TokenDeployData,
};
use ord::inscription::Inscription;
use test_case::test_case;
struct InscriptionBuilder {
@@ -241,7 +244,8 @@ mod test {
incomplete_field: false,
metadata: None,
metaprotocol: None,
parent: None,
parents: vec![],
rune: None,
pointer: None,
unrecognized_even_field: false,
delegate: None,

View File

@@ -1,9 +1,7 @@
use chainhook_sdk::{
types::{
OrdinalInscriptionNumber, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination,
},
utils::Context,
use chainhook_sdk::utils::Context;
use chainhook_types::{
OrdinalInscriptionCharms, OrdinalInscriptionNumber, OrdinalInscriptionRevealData,
OrdinalInscriptionTransferData, OrdinalInscriptionTransferDestination,
};
pub fn get_test_ctx() -> Context {
@@ -20,7 +18,7 @@ pub struct Brc20RevealBuilder {
pub inscriber_address: Option<String>,
pub inscription_id: String,
pub ordinal_number: u64,
pub parent: Option<String>,
pub parents: Vec<String>,
}
impl Brc20RevealBuilder {
@@ -34,7 +32,7 @@ impl Brc20RevealBuilder {
inscription_id: "9bb2314d666ae0b1db8161cb373fcc1381681f71445c4e0335aa80ea9c37fcddi0"
.to_string(),
ordinal_number: 0,
parent: None,
parents: vec![],
}
}
@@ -61,8 +59,8 @@ impl Brc20RevealBuilder {
self
}
pub fn parent(mut self, val: Option<String>) -> Self {
self.parent = val;
pub fn parents(mut self, val: Vec<String>) -> Self {
self.parents = val;
self
}
@@ -81,7 +79,7 @@ impl Brc20RevealBuilder {
delegate: None,
metaprotocol: None,
metadata: None,
parent: self.parent,
parents: self.parents,
ordinal_number: self.ordinal_number,
ordinal_block_height: 767430,
ordinal_offset: 0,
@@ -90,6 +88,7 @@ impl Brc20RevealBuilder {
satpoint_post_inscription:
"9bb2314d666ae0b1db8161cb373fcc1381681f71445c4e0335aa80ea9c37fcdd:0:0".to_string(),
curse_type: None,
charms: OrdinalInscriptionCharms::none(),
}
}
}
@@ -108,7 +107,8 @@ impl Brc20TransferBuilder {
destination: OrdinalInscriptionTransferDestination::Transferred(
"bc1pls75sfwullhygkmqap344f5cqf97qz95lvle6fvddm0tpz2l5ffslgq3m0".to_string(),
),
satpoint_post_transfer: "e45957c419f130cd5c88cdac3eb1caf2d118aee20c17b15b74a611be395a065d:0:0".to_string(),
satpoint_post_transfer:
"e45957c419f130cd5c88cdac3eb1caf2d118aee20c17b15b74a611be395a065d:0:0".to_string(),
tx_index: 0,
}
}

View File

@@ -1,11 +1,11 @@
use std::collections::HashMap;
use chainhook_postgres::deadpool_postgres::Transaction;
use chainhook_sdk::types::{
use chainhook_types::{
BitcoinNetwork, BlockIdentifier, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination, TransactionIdentifier,
};
use chainhook_sdk::utils::Context;
use deadpool_postgres::Transaction;
use crate::try_debug;
@@ -106,7 +106,7 @@ pub async fn verify_brc20_operation(
return Ok(None);
};
if data.tick.len() == 5 {
let Some(parent) = &reveal.parent else {
if reveal.parents.len() == 0 {
try_debug!(
ctx,
"BRC-20: Attempting to mint self-minted token {} without a parent ref",
@@ -114,7 +114,7 @@ pub async fn verify_brc20_operation(
);
return Ok(None);
};
if parent != &token.inscription_id {
if !reveal.parents.contains(&token.inscription_id) {
try_debug!(
ctx,
"BRC-20: Mint attempt for self-minted token {} does not point to deploy as parent",
@@ -297,7 +297,7 @@ pub async fn verify_brc20_transfers(
#[cfg(test)]
mod test {
use chainhook_postgres::{pg_begin, pg_pool_client};
use chainhook_sdk::types::{
use chainhook_types::{
BitcoinNetwork, BlockIdentifier, OrdinalInscriptionRevealData,
OrdinalInscriptionTransferData, OrdinalInscriptionTransferDestination,
TransactionIdentifier,
@@ -587,7 +587,7 @@ mod test {
tick: "$pepe".to_string(),
amt: "100.00".to_string(),
}),
Brc20RevealBuilder::new().inscription_number(1).parent(Some("test".to_string())).build()
Brc20RevealBuilder::new().inscription_number(1).parents(vec!["test".to_string()]).build()
=> Ok(None);
"with mint with wrong parent pointer"
)]
@@ -598,7 +598,7 @@ mod test {
}),
Brc20RevealBuilder::new()
.inscription_number(1)
.parent(Some("9bb2314d666ae0b1db8161cb373fcc1381681f71445c4e0335aa80ea9c37fcddi0".to_string()))
.parents(vec!["9bb2314d666ae0b1db8161cb373fcc1381681f71445c4e0335aa80ea9c37fcddi0".to_string()])
.build()
=> Ok(Some(VerifiedBrc20Operation::TokenMint(VerifiedBrc20BalanceData {
tick: "$pepe".to_string(),

View File

@@ -10,7 +10,8 @@ use fxhash::{FxBuildHasher, FxHasher};
use std::hash::BuildHasherDefault;
use std::ops::Div;
use chainhook_sdk::{types::BitcoinNetwork, utils::Context};
use chainhook_sdk::utils::Context;
use chainhook_types::BitcoinNetwork;
use crate::{
config::Config,
@@ -125,8 +126,7 @@ pub async fn should_sync_rocks_db(
let blocks_db = open_blocks_db_with_retry(true, &config, &ctx);
let last_compressed_block = find_last_block_inserted(&blocks_db) as u64;
let ord_client = pg_pool_client(&pg_pools.ordinals).await?;
let last_indexed_block = match ordinals_pg::get_chain_tip_block_height(&ord_client).await?
{
let last_indexed_block = match ordinals_pg::get_chain_tip_block_height(&ord_client).await? {
Some(last_indexed_block) => last_indexed_block,
None => 0,
};
@@ -148,8 +148,7 @@ pub async fn should_sync_ordinals_db(
let mut start_block = find_last_block_inserted(&blocks_db) as u64;
let ord_client = pg_pool_client(&pg_pools.ordinals).await?;
match ordinals_pg::get_chain_tip_block_height(&ord_client).await?
{
match ordinals_pg::get_chain_tip_block_height(&ord_client).await? {
Some(height) => {
if find_pinned_block_bytes_at_block_height(height as u32, 3, &blocks_db, &ctx).is_none()
{

View File

@@ -1,7 +1,7 @@
pub mod processors;
use chainhook_sdk::observer::BitcoinConfig;
use chainhook_sdk::types::BitcoinBlockData;
use chainhook_types::BitcoinBlockData;
use chainhook_sdk::utils::Context;
use crossbeam_channel::bounded;
use std::collections::{HashMap, VecDeque};

View File

@@ -1,4 +1,5 @@
use chainhook_sdk::{types::BitcoinBlockData, utils::Context};
use chainhook_sdk::utils::Context;
use chainhook_types::BitcoinBlockData;
use crossbeam_channel::{Sender, TryRecvError};
use rocksdb::DB;
use std::{

View File

@@ -6,10 +6,8 @@ use std::{
};
use chainhook_postgres::{pg_begin, pg_pool_client};
use chainhook_sdk::{
types::{BitcoinBlockData, TransactionIdentifier},
utils::Context,
};
use chainhook_sdk::utils::Context;
use chainhook_types::{BitcoinBlockData, TransactionIdentifier};
use crossbeam_channel::{Sender, TryRecvError};
use dashmap::DashMap;

View File

@@ -1,12 +1,13 @@
use chainhook_sdk::bitcoincore_rpc_json::bitcoin::Txid;
use bitcoin::hash_types::Txid;
use bitcoin::Witness;
use chainhook_sdk::indexer::bitcoin::BitcoinTransactionFullBreakdown;
use chainhook_sdk::indexer::bitcoin::{standardize_bitcoin_block, BitcoinBlockFullBreakdown};
use chainhook_sdk::types::{
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
OrdinalInscriptionCurseType, OrdinalInscriptionNumber, OrdinalInscriptionRevealData,
OrdinalInscriptionTransferData, OrdinalOperation,
};
use chainhook_sdk::utils::Context;
use chainhook_types::{
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
OrdinalInscriptionCharms, OrdinalInscriptionCurseType, OrdinalInscriptionNumber,
OrdinalInscriptionRevealData, OrdinalInscriptionTransferData, OrdinalOperation,
};
use serde_json::json;
use std::collections::{BTreeMap, HashMap};
use std::str::FromStr;
@@ -14,11 +15,11 @@ use std::str::FromStr;
use crate::config::Config;
use crate::core::meta_protocols::brc20::brc20_activation_height;
use crate::core::meta_protocols::brc20::parser::{parse_brc20_operation, ParsedBrc20Operation};
use crate::ord::envelope::{Envelope, ParsedEnvelope, RawEnvelope};
use crate::ord::inscription::Inscription;
use crate::ord::inscription_id::InscriptionId;
use crate::try_warn;
use {chainhook_sdk::bitcoincore_rpc::bitcoin::Witness, std::str};
use ord::envelope::{Envelope, ParsedEnvelope};
use ord::inscription::Inscription;
use ord::inscription_id::InscriptionId;
use std::str;
pub fn parse_inscriptions_from_witness(
input_index: usize,
@@ -27,7 +28,7 @@ pub fn parse_inscriptions_from_witness(
) -> Option<Vec<(OrdinalInscriptionRevealData, Inscription)>> {
let witness = Witness::from_slice(&witness_bytes);
let tapscript = witness.tapscript()?;
let envelopes: Vec<Envelope<Inscription>> = RawEnvelope::from_tapscript(tapscript, input_index)
let envelopes: Vec<Envelope<Inscription>> = Envelope::from_tapscript(tapscript, input_index)
.ok()?
.into_iter()
.map(|e| ParsedEnvelope::from(e))
@@ -64,7 +65,12 @@ pub fn parse_inscriptions_from_witness(
let mut content_bytes = "0x".to_string();
content_bytes.push_str(&hex::encode(&inscription_content_bytes));
let parent = envelope.payload.parent().and_then(|i| Some(i.to_string()));
let parents = envelope
.payload
.parents()
.iter()
.map(|i| i.to_string())
.collect();
let delegate = envelope
.payload
.delegate()
@@ -75,12 +81,9 @@ pub fn parse_inscriptions_from_witness(
.and_then(|p| Some(p.to_string()));
let metadata = envelope.payload.metadata().and_then(|m| Some(json!(m)));
// Most of these fields will be calculated later when we know for certain which satoshi contains this inscription.
let reveal_data = OrdinalInscriptionRevealData {
content_type: envelope
.payload
.content_type()
.unwrap_or("unknown")
.to_string(),
content_type: envelope.payload.content_type().unwrap_or("").to_string(),
content_bytes,
content_length: inscription_content_bytes.len(),
inscription_id: inscription_id.to_string(),
@@ -91,7 +94,7 @@ pub fn parse_inscriptions_from_witness(
inscription_fee: 0,
inscription_number: OrdinalInscriptionNumber::zero(),
inscriber_address: None,
parent,
parents,
delegate,
metaprotocol,
metadata,
@@ -101,6 +104,7 @@ pub fn parse_inscriptions_from_witness(
transfers_pre_inscription: 0,
satpoint_post_inscription: format!(""),
curse_type,
charms: OrdinalInscriptionCharms::none(),
};
inscriptions.push((reveal_data, envelope.payload));
}
@@ -246,21 +250,19 @@ pub fn get_inscriptions_transferred_in_block(
mod test {
use std::collections::HashMap;
use bitcoin::Amount;
use chainhook_sdk::{
bitcoin::Amount,
indexer::bitcoin::{
BitcoinBlockFullBreakdown, BitcoinTransactionFullBreakdown,
BitcoinTransactionInputFullBreakdown, BitcoinTransactionInputPrevoutFullBreakdown,
GetRawTransactionResultVinScriptSig,
},
types::{
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData,
OrdinalInscriptionTransferData, OrdinalInscriptionTransferDestination,
OrdinalOperation,
},
utils::Context,
};
use chainhook_types::{
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination, OrdinalOperation,
};
use test_case::test_case;
use crate::{

View File

@@ -4,28 +4,26 @@ use std::{
sync::Arc,
};
use chainhook_postgres::deadpool_postgres::Transaction;
use chainhook_sdk::{
bitcoincore_rpc_json::bitcoin::Network,
types::{
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
OrdinalInscriptionCurseType, OrdinalInscriptionTransferDestination, OrdinalOperation,
TransactionIdentifier,
},
utils::Context,
use bitcoin::Network;
use chainhook_sdk::utils::Context;
use chainhook_types::{
BitcoinBlockData, BitcoinNetwork, BitcoinTransactionData, BlockIdentifier,
OrdinalInscriptionCurseType, OrdinalInscriptionTransferDestination, OrdinalOperation,
TransactionIdentifier,
};
use crossbeam_channel::unbounded;
use dashmap::DashMap;
use deadpool_postgres::Transaction;
use fxhash::FxHasher;
use crate::{
config::Config,
core::resolve_absolute_pointer,
db::{self, cursor::TransactionBytesCursor, ordinals_pg},
ord::height::Height,
try_debug, try_error, try_info,
utils::format_inscription_id,
};
use ord::height::Height;
use std::sync::mpsc::channel;
@@ -407,7 +405,7 @@ pub async fn augment_block_with_inscriptions(
let mut sats_overflows = VecDeque::new();
let network = get_bitcoin_network(&block.metadata.network);
let coinbase_subsidy = Height(block.block_identifier.index).subsidy();
let coinbase_subsidy = Height(block.block_identifier.index as u32).subsidy();
let coinbase_tx = &block.transactions[0].clone();
let mut cumulated_fees = 0u64;

View File

@@ -1,5 +1,5 @@
use chainhook_sdk::types::{BlockIdentifier, OrdinalInscriptionNumber, TransactionIdentifier};
use chainhook_sdk::utils::Context;
use chainhook_types::{BlockIdentifier, OrdinalInscriptionNumber, TransactionIdentifier};
use dashmap::DashMap;
use fxhash::FxHasher;
use std::hash::BuildHasherDefault;
@@ -9,9 +9,9 @@ use crate::config::Config;
use crate::db::blocks::find_pinned_block_bytes_at_block_height;
use crate::db::cursor::{BlockBytesCursor, TransactionBytesCursor};
use crate::ord::height::Height;
use crate::ord::sat::Sat;
use crate::try_error;
use ord::height::Height;
use ord::sat::Sat;
#[derive(Clone, Debug)]
pub struct TraversalResult {
@@ -25,7 +25,7 @@ pub struct TraversalResult {
impl TraversalResult {
pub fn get_ordinal_coinbase_height(&self) -> u64 {
let sat = Sat(self.ordinal_number);
sat.height().n()
sat.height().n() as u64
}
pub fn get_ordinal_coinbase_offset(&self) -> u64 {
@@ -313,10 +313,8 @@ pub fn compute_satoshi_number(
mod test {
use std::{hash::BuildHasherDefault, sync::Arc};
use chainhook_sdk::{
types::{bitcoin::TxOut, BlockIdentifier, TransactionIdentifier},
utils::Context,
};
use chainhook_sdk::utils::Context;
use chainhook_types::{bitcoin::TxOut, BlockIdentifier, TransactionIdentifier};
use dashmap::DashMap;
use fxhash::FxHasher;

View File

@@ -1,21 +1,20 @@
use std::collections::HashSet;
use chainhook_postgres::deadpool_postgres::Transaction;
use chainhook_sdk::{
bitcoincore_rpc_json::bitcoin::{Address, Network, ScriptBuf},
types::{
BitcoinBlockData, BitcoinTransactionData, BlockIdentifier, OrdinalInscriptionTransferData, OrdinalInscriptionTransferDestination, OrdinalOperation
},
utils::Context,
use bitcoin::{Address, Network, ScriptBuf};
use chainhook_sdk::utils::Context;
use chainhook_types::{
BitcoinBlockData, BitcoinTransactionData, BlockIdentifier, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination, OrdinalOperation,
};
use deadpool_postgres::Transaction;
use crate::{
core::{compute_next_satpoint_data, SatPosition},
db::ordinals_pg,
ord::height::Height,
try_info,
utils::format_outpoint_to_watch,
};
use ord::height::Height;
use super::inscription_sequencing::get_bitcoin_network;
@@ -51,7 +50,7 @@ pub async fn augment_block_with_transfers(
ctx: &Context,
) -> Result<(), String> {
let network = get_bitcoin_network(&block.metadata.network);
let coinbase_subsidy = Height(block.block_identifier.index).subsidy();
let coinbase_subsidy = Height(block.block_identifier.index as u32).subsidy();
let coinbase_tx = &block.transactions[0].clone();
let mut cumulated_fees = 0;
for (tx_index, tx) in block.transactions.iter_mut().enumerate() {
@@ -261,9 +260,9 @@ pub async fn augment_transaction_with_ordinal_transfers(
#[cfg(test)]
mod test {
use chainhook_sdk::{
bitcoin::Network, types::OrdinalInscriptionTransferDestination, utils::Context,
};
use bitcoin::Network;
use chainhook_sdk::utils::Context;
use chainhook_types::OrdinalInscriptionTransferDestination;
use crate::core::test_builders::{TestTransactionBuilder, TestTxInBuilder, TestTxOutBuilder};

View File

@@ -1,5 +1,6 @@
use chainhook_postgres::deadpool_postgres::GenericClient;
use chainhook_sdk::{bitcoin::Network, types::OrdinalInscriptionNumber};
use bitcoin::Network;
use chainhook_types::OrdinalInscriptionNumber;
use deadpool_postgres::GenericClient;
use crate::db::ordinals_pg;
@@ -142,8 +143,8 @@ impl SequenceCursor {
#[cfg(test)]
mod test {
use bitcoin::Network;
use chainhook_postgres::{pg_begin, pg_pool_client};
use chainhook_sdk::bitcoin::Network;
use test_case::test_case;

View File

@@ -1,8 +1,5 @@
use chainhook_sdk::types::{
bitcoin::{OutPoint, TxIn, TxOut},
BitcoinBlockData, BitcoinBlockMetadata, BitcoinNetwork, BitcoinTransactionData,
BitcoinTransactionMetadata, BlockIdentifier, Brc20Operation, OrdinalInscriptionNumber,
OrdinalInscriptionRevealData, OrdinalOperation, TransactionIdentifier,
use chainhook_types::{
bitcoin::{OutPoint, TxIn, TxOut}, BitcoinBlockData, BitcoinBlockMetadata, BitcoinNetwork, BitcoinTransactionData, BitcoinTransactionMetadata, BlockIdentifier, Brc20Operation, OrdinalInscriptionCharms, OrdinalInscriptionNumber, OrdinalInscriptionRevealData, OrdinalOperation, TransactionIdentifier
};
pub struct TestBlockBuilder {
@@ -96,7 +93,7 @@ impl TestTransactionBuilder {
delegate: None,
metaprotocol: None,
metadata: None,
parent: None,
parents: vec![],
ordinal_number: 0,
ordinal_block_height: 0,
ordinal_offset: 0,
@@ -104,6 +101,7 @@ impl TestTransactionBuilder {
transfers_pre_inscription: 0,
satpoint_post_inscription: "b61b0172d95e266c18aea0c624db987e971a5d6d4ebc2aaed85da4642d635735:0:0".to_string(),
curse_type: None,
charms: OrdinalInscriptionCharms::none(),
},
)];
tx
@@ -157,7 +155,6 @@ impl TestTransactionBuilder {
inputs: self.inputs,
outputs: self.outputs,
ordinal_operations: self.ordinal_operations,
stacks_operations: vec![],
brc20_operation: self.brc20_operation,
proof: None,
fee: 0,

View File

@@ -1,7 +1,7 @@
use std::{path::PathBuf, thread::sleep, time::Duration};
use chainhook_sdk::utils::Context;
use rand::{thread_rng, Rng};
use rand::{rng, Rng};
use rocksdb::{DBPinnableSlice, Options, DB};
use crate::{config::Config, try_error, try_warn};
@@ -141,13 +141,13 @@ pub fn find_pinned_block_bytes_at_block_height<'a>(
// read_options.fill_cache(true);
// read_options.set_verify_checksums(false);
let mut backoff: f64 = 1.0;
let mut rng = thread_rng();
let mut rng = rng();
loop {
match blocks_db.get_pinned(block_height.to_be_bytes()) {
Ok(Some(res)) => return Some(res),
_ => {
attempt += 1;
backoff = 2.0 * backoff + (backoff * rng.gen_range(0.0..1.0));
backoff = 2.0 * backoff + (backoff * rng.random_range(0.0..1.0));
let duration = std::time::Duration::from_millis((backoff * 1_000.0) as u64);
try_warn!(
ctx,
@@ -175,14 +175,14 @@ pub fn find_block_bytes_at_block_height<'a>(
// read_options.fill_cache(true);
// read_options.set_verify_checksums(false);
let mut backoff: f64 = 1.0;
let mut rng = thread_rng();
let mut rng = rng();
loop {
match blocks_db.get(block_height.to_be_bytes()) {
Ok(Some(res)) => return Some(res),
_ => {
attempt += 1;
backoff = 2.0 * backoff + (backoff * rng.gen_range(0.0..1.0));
backoff = 2.0 * backoff + (backoff * rng.random_range(0.0..1.0));
let duration = std::time::Duration::from_millis((backoff * 1_000.0) as u64);
try_warn!(
ctx,
@@ -237,7 +237,7 @@ pub fn delete_blocks_in_block_range(
#[cfg(test)]
pub fn insert_standardized_block(
block: &chainhook_sdk::types::BitcoinBlockData,
block: &chainhook_types::BitcoinBlockData,
blocks_db_rw: &DB,
ctx: &Context,
) {

View File

@@ -1,7 +1,8 @@
use std::io::Cursor;
use std::io::{Read, Write};
use chainhook_sdk::{indexer::bitcoin::BitcoinBlockFullBreakdown, types::BitcoinBlockData};
use chainhook_sdk::indexer::bitcoin::BitcoinBlockFullBreakdown;
use chainhook_types::BitcoinBlockData;
#[derive(Debug)]
pub struct BlockBytesCursor<'a> {
@@ -368,9 +369,9 @@ mod tests {
use super::*;
use chainhook_sdk::{
indexer::bitcoin::{parse_downloaded_block, standardize_bitcoin_block},
types::BitcoinNetwork,
utils::Context,
};
use chainhook_types::BitcoinNetwork;
#[test]
fn test_block_cursor_roundtrip() {

View File

@@ -37,9 +37,7 @@ pub async fn reset_dbs(config: &Config, ctx: &Context) -> Result<(), String> {
Ok(())
}
pub async fn pg_reset_db(
pg_client: &mut chainhook_postgres::tokio_postgres::Client,
) -> Result<(), String> {
pub async fn pg_reset_db(pg_client: &mut tokio_postgres::Client) -> Result<(), String> {
pg_client
.batch_execute(
"
@@ -77,17 +75,49 @@ pub fn pg_test_config() -> chainhook_postgres::PgConnectionConfig {
}
#[cfg(test)]
pub fn pg_test_connection_pool() -> chainhook_postgres::deadpool_postgres::Pool {
pub fn pg_test_connection_pool() -> deadpool_postgres::Pool {
chainhook_postgres::pg_pool(&pg_test_config()).unwrap()
}
#[cfg(test)]
pub async fn pg_test_connection() -> chainhook_postgres::tokio_postgres::Client {
pub async fn pg_test_connection() -> tokio_postgres::Client {
chainhook_postgres::pg_connect(&pg_test_config())
.await
.unwrap()
}
#[cfg(test)]
pub async fn pg_test_clear_db(pg_client: &mut tokio_postgres::Client) {
match pg_client
.batch_execute(
"
DO $$ DECLARE
r RECORD;
BEGIN
FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
END LOOP;
END $$;
DO $$ DECLARE
r RECORD;
BEGIN
FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = current_schema())) LOOP
EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
END LOOP;
END $$;",
)
.await {
Ok(rows) => rows,
Err(e) => {
println!(
"error rolling back test migrations: {}",
e.to_string()
);
std::process::exit(1);
}
};
}
/// Drops DB files in a test environment.
#[cfg(test)]
pub fn drop_all_dbs(config: &Config) {

View File

@@ -1,12 +1,12 @@
use chainhook_postgres::{
tokio_postgres::Row,
types::{PgBigIntU32, PgNumericU64},
FromPgRow,
};
use chainhook_sdk::types::{
use chainhook_types::{
BlockIdentifier, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination, TransactionIdentifier,
};
use tokio_postgres::Row;
use crate::core::protocol::satoshi_tracking::parse_output_and_offset_from_satpoint;

View File

@@ -1,12 +1,12 @@
use chainhook_postgres::{
tokio_postgres::Row,
types::{PgBigIntU32, PgNumericU64},
FromPgRow,
};
use chainhook_sdk::types::{
use chainhook_types::{
BlockIdentifier, OrdinalInscriptionCurseType, OrdinalInscriptionRevealData,
TransactionIdentifier,
};
use tokio_postgres::Row;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DbInscription {
@@ -30,7 +30,6 @@ pub struct DbInscription {
pub pointer: Option<PgNumericU64>,
pub metadata: Option<String>,
pub metaprotocol: Option<String>,
pub parent: Option<String>,
pub delegate: Option<String>,
pub timestamp: PgBigIntU32,
}
@@ -81,7 +80,6 @@ impl DbInscription {
pointer: reveal.inscription_pointer.map(|p| PgNumericU64(p)),
metadata: reveal.metadata.as_ref().map(|m| m.to_string()),
metaprotocol: reveal.metaprotocol.clone(),
parent: reveal.parent.clone(),
delegate: reveal.delegate.clone(),
timestamp: PgBigIntU32(timestamp),
}
@@ -111,7 +109,6 @@ impl FromPgRow for DbInscription {
pointer: row.get("pointer"),
metadata: row.get("metadata"),
metaprotocol: row.get("metaprotocol"),
parent: row.get("parent"),
delegate: row.get("delegate"),
timestamp: row.get("timestamp"),
}

View File

@@ -0,0 +1,20 @@
use chainhook_types::OrdinalInscriptionRevealData;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DbInscriptionParent {
pub inscription_id: String,
pub parent_inscription_id: String,
}
impl DbInscriptionParent {
pub fn from_reveal(reveal: &OrdinalInscriptionRevealData) -> Result<Vec<Self>, String> {
Ok(reveal
.parents
.iter()
.map(|p| DbInscriptionParent {
inscription_id: reveal.inscription_id.clone(),
parent_inscription_id: p.clone(),
})
.collect())
}
}

View File

@@ -1,4 +1,4 @@
use chainhook_sdk::types::OrdinalInscriptionRevealData;
use chainhook_types::OrdinalInscriptionRevealData;
use regex::Regex;
lazy_static! {
@@ -33,7 +33,7 @@ impl DbInscriptionRecursion {
#[cfg(test)]
mod test {
use chainhook_sdk::types::{OrdinalInscriptionNumber, OrdinalInscriptionRevealData};
use chainhook_types::{OrdinalInscriptionCharms, OrdinalInscriptionNumber, OrdinalInscriptionRevealData};
use super::DbInscriptionRecursion;
@@ -53,7 +53,7 @@ mod test {
delegate: None,
metaprotocol: None,
metadata: None,
parent: None,
parents: vec![],
ordinal_number: 959876891264081,
ordinal_block_height: 191975,
ordinal_offset: 0,
@@ -61,6 +61,7 @@ mod test {
transfers_pre_inscription: 0,
satpoint_post_inscription: "e47a70a218dfa746ba410b1c057403bb481523d830562fd8dec61ec4d2915e5f:0:0".to_string(),
curse_type: None,
charms: OrdinalInscriptionCharms::none(),
};
let recursions = DbInscriptionRecursion::from_reveal(&reveal).unwrap();
assert_eq!(2, recursions.len());

View File

@@ -1,12 +1,12 @@
use chainhook_postgres::{
tokio_postgres::Row,
types::{PgBigIntU32, PgNumericU64},
FromPgRow,
};
use chainhook_sdk::types::{
use chainhook_types::{
BlockIdentifier, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination, TransactionIdentifier,
};
use tokio_postgres::Row;
use crate::core::protocol::satoshi_tracking::parse_output_and_offset_from_satpoint;

View File

@@ -1,7 +1,8 @@
use chainhook_postgres::{tokio_postgres::Row, types::PgNumericU64, FromPgRow};
use chainhook_sdk::types::OrdinalInscriptionRevealData;
use chainhook_postgres::{types::PgNumericU64, FromPgRow};
use chainhook_types::OrdinalInscriptionRevealData;
use tokio_postgres::Row;
use crate::ord::{rarity::Rarity, sat::Sat};
use ord::{rarity::Rarity, sat::Sat};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DbSatoshi {

View File

@@ -1,6 +1,7 @@
mod db_current_location;
mod db_inscription;
mod db_inscription_recursion;
mod db_inscription_parent;
mod db_location;
mod db_satoshi;
@@ -9,3 +10,4 @@ pub use db_inscription::DbInscription;
pub use db_inscription_recursion::DbInscriptionRecursion;
pub use db_location::DbLocation;
pub use db_satoshi::DbSatoshi;
pub use db_inscription_parent::DbInscriptionParent;

View File

@@ -1,16 +1,16 @@
use std::collections::{BTreeMap, HashMap};
use chainhook_postgres::{
deadpool_postgres::GenericClient,
tokio_postgres::{types::ToSql, Client},
types::{PgBigIntU32, PgNumericU64},
utils,
};
use chainhook_sdk::types::{
use chainhook_types::{
bitcoin::TxIn, BitcoinBlockData, OrdinalInscriptionNumber, OrdinalOperation,
TransactionIdentifier,
};
use deadpool_postgres::GenericClient;
use refinery::embed_migrations;
use tokio_postgres::{types::ToSql, Client};
use crate::{
core::protocol::{satoshi_numbering::TraversalResult, satoshi_tracking::WatchedSatpoint},
@@ -18,7 +18,8 @@ use crate::{
};
use super::models::{
DbCurrentLocation, DbInscription, DbInscriptionRecursion, DbLocation, DbSatoshi,
DbCurrentLocation, DbInscription, DbInscriptionParent, DbInscriptionRecursion, DbLocation,
DbSatoshi,
};
embed_migrations!("../../migrations/ordinals");
@@ -257,7 +258,6 @@ async fn insert_inscriptions<T: GenericClient>(
params.push(&row.pointer);
params.push(&row.metadata);
params.push(&row.metaprotocol);
params.push(&row.parent);
params.push(&row.delegate);
params.push(&row.timestamp);
}
@@ -266,9 +266,9 @@ async fn insert_inscriptions<T: GenericClient>(
&format!("INSERT INTO inscriptions
(inscription_id, ordinal_number, number, classic_number, block_height, block_hash, tx_id, tx_index, address,
mime_type, content_type, content_length, content, fee, curse_type, recursive, input_index, pointer, metadata,
metaprotocol, parent, delegate, timestamp)
metaprotocol, delegate, timestamp)
VALUES {}
ON CONFLICT (number) DO NOTHING", utils::multi_row_query_param_str(chunk.len(), 23)),
ON CONFLICT (number) DO NOTHING", utils::multi_row_query_param_str(chunk.len(), 22)),
&params,
)
.await
@@ -307,6 +307,36 @@ async fn insert_inscription_recursions<T: GenericClient>(
Ok(())
}
async fn insert_inscription_parents<T: GenericClient>(
inscription_parents: &Vec<DbInscriptionParent>,
client: &T,
) -> Result<(), String> {
if inscription_parents.len() == 0 {
return Ok(());
}
for chunk in inscription_parents.chunks(500) {
let mut params: Vec<&(dyn ToSql + Sync)> = vec![];
for row in chunk.iter() {
params.push(&row.inscription_id);
params.push(&row.parent_inscription_id);
}
client
.query(
&format!(
"INSERT INTO inscription_parents
(inscription_id, parent_inscription_id)
VALUES {}
ON CONFLICT (inscription_id, parent_inscription_id) DO NOTHING",
utils::multi_row_query_param_str(chunk.len(), 2)
),
&params,
)
.await
.map_err(|e| format!("insert_inscription_parents: {e}"))?;
}
Ok(())
}
async fn insert_locations<T: GenericClient>(
locations: &Vec<DbLocation>,
client: &T,
@@ -695,6 +725,7 @@ pub async fn insert_block<T: GenericClient>(
let mut inscriptions = vec![];
let mut locations = vec![];
let mut inscription_recursions = vec![];
let mut inscription_parents = vec![];
let mut current_locations: HashMap<PgNumericU64, DbCurrentLocation> = HashMap::new();
let mut mime_type_counts = HashMap::new();
let mut sat_rarity_counts = HashMap::new();
@@ -737,6 +768,7 @@ pub async fn insert_block<T: GenericClient>(
inscription.recursive = true;
}
inscription_recursions.extend(recursions);
inscription_parents.extend(DbInscriptionParent::from_reveal(reveal)?);
inscriptions.push(inscription);
locations.push(DbLocation::from_reveal(
reveal,
@@ -809,6 +841,7 @@ pub async fn insert_block<T: GenericClient>(
insert_inscriptions(&inscriptions, client).await?;
insert_inscription_recursions(&inscription_recursions, client).await?;
insert_inscription_parents(&inscription_parents, client).await?;
insert_locations(&locations, client).await?;
insert_satoshis(&satoshis, client).await?;
insert_current_locations(&current_locations, client).await?;
@@ -972,15 +1005,14 @@ pub async fn rollback_block<T: GenericClient>(block_height: u64, client: &T) ->
#[cfg(test)]
mod test {
use chainhook_postgres::{
deadpool_postgres::GenericClient,
pg_begin, pg_pool_client,
types::{PgBigIntU32, PgNumericU64},
FromPgRow,
};
use chainhook_sdk::types::{
OrdinalInscriptionNumber, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData,
OrdinalInscriptionTransferDestination, OrdinalOperation,
use chainhook_types::{
OrdinalInscriptionCharms, OrdinalInscriptionNumber, OrdinalInscriptionRevealData, OrdinalInscriptionTransferData, OrdinalInscriptionTransferDestination, OrdinalOperation
};
use deadpool_postgres::GenericClient;
use crate::{
core::test_builders::{TestBlockBuilder, TestTransactionBuilder},
@@ -1163,7 +1195,7 @@ mod test {
delegate: None,
metaprotocol: None,
metadata: None,
parent: None,
parents: vec![],
ordinal_number: 7000,
ordinal_block_height: 0,
ordinal_offset: 0,
@@ -1171,6 +1203,7 @@ mod test {
transfers_pre_inscription: 0,
satpoint_post_inscription: "b61b0172d95e266c18aea0c624db987e971a5d6d4ebc2aaed85da4642d635735:0:0".to_string(),
curse_type: None,
charms: OrdinalInscriptionCharms::none(),
},
))
.build()

View File

@@ -9,13 +9,9 @@ extern crate lazy_static;
extern crate serde;
pub extern crate chainhook_sdk;
pub extern crate hex;
pub mod config;
pub mod core;
pub mod db;
pub mod download;
pub mod ord;
pub mod service;
pub mod utils;

View File

@@ -1,95 +0,0 @@
use std::{
fmt::{Display, Formatter},
path::{Path, PathBuf},
};
use chainhook_sdk::bitcoincore_rpc::bitcoin::{self, Address, Block, Network, Script};
use super::*;
/// The Bitcoin chain being indexed. Serialized in kebab-case for config
/// files and CLI flags; defaults to mainnet.
#[derive(Default, Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum Chain {
    #[default]
    Mainnet,
    Testnet,
    Signet,
    Regtest,
}
impl Chain {
    /// Maps a chainhook `BitcoinNetwork` onto the equivalent ord `Chain`.
    pub fn from_bitcoin_network(network: &BitcoinNetwork) -> Chain {
        match network {
            BitcoinNetwork::Mainnet => Chain::Mainnet,
            BitcoinNetwork::Testnet => Chain::Testnet,
            BitcoinNetwork::Regtest => Chain::Regtest,
            BitcoinNetwork::Signet => Chain::Signet,
        }
    }

    /// Converts to the rust-bitcoin `Network` enum.
    pub(crate) fn network(self) -> Network {
        match self {
            Self::Mainnet => Network::Bitcoin,
            Self::Testnet => Network::Testnet,
            Self::Signet => Network::Signet,
            Self::Regtest => Network::Regtest,
        }
    }

    /// Default bitcoind JSON-RPC port for this chain.
    pub(crate) fn default_rpc_port(self) -> u16 {
        match self {
            Self::Mainnet => 8332,
            Self::Regtest => 18443,
            Self::Signet => 38332,
            Self::Testnet => 18332,
        }
    }

    /// Maximum inscription content size accepted on this chain;
    /// `None` means unlimited (mainnet and regtest).
    pub(crate) fn inscription_content_size_limit(self) -> Option<usize> {
        match self {
            Self::Mainnet | Self::Regtest => None,
            Self::Testnet | Self::Signet => Some(1024),
        }
    }

    /// Height of the first block that can contain an inscription on this
    /// chain; indexing may safely start here.
    pub fn first_inscription_height(self) -> u64 {
        match self {
            Self::Mainnet => 767430,
            Self::Regtest => 0,
            Self::Signet => 112402,
            Self::Testnet => 2413343,
        }
    }

    /// Genesis block for this chain, from rust-bitcoin's built-in constants.
    pub(crate) fn genesis_block(self) -> Block {
        bitcoin::blockdata::constants::genesis_block(self.network())
    }

    /// Decodes an output script into an address under this chain's network
    /// rules; errors for scripts with no standard address form.
    pub fn address_from_script(self, script: &Script) -> Result<Address, bitcoin::address::Error> {
        Address::from_script(script, self.network())
    }

    /// Chain-specific subdirectory of the data dir, mirroring bitcoind's
    /// on-disk layout (mainnet lives in the data dir root).
    pub(crate) fn join_with_data_dir(self, data_dir: &Path) -> PathBuf {
        match self {
            Self::Mainnet => data_dir.to_owned(),
            Self::Testnet => data_dir.join("testnet3"),
            Self::Signet => data_dir.join("signet"),
            Self::Regtest => data_dir.join("regtest"),
        }
    }
}
impl Display for Chain {
    /// Renders the chain's lowercase name, matching the kebab-case serde
    /// representation.
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        let name = match self {
            Self::Mainnet => "mainnet",
            Self::Regtest => "regtest",
            Self::Signet => "signet",
            Self::Testnet => "testnet",
        };
        f.write_str(name)
    }
}

View File

@@ -1,22 +0,0 @@
use std::{fmt::Display, str::FromStr};
use serde::{Deserialize, Deserializer};
use super::*;
/// Adapter that deserializes any `FromStr` type from a serde string value.
///
/// Used to give types like `InscriptionId` a `Deserialize` impl driven by
/// their `FromStr` parser, turning parse failures into serde errors.
pub(crate) struct DeserializeFromStr<T: FromStr>(pub(crate) T);

impl<'de, T: FromStr> Deserialize<'de> for DeserializeFromStr<T>
where
    T::Err: Display,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Deserialize a plain string, then run the target type's parser;
        // the parser's error Display output becomes the serde error message.
        Ok(Self(
            FromStr::from_str(&String::deserialize(deserializer)?)
                .map_err(serde::de::Error::custom)?,
        ))
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,237 +0,0 @@
use super::{height::Height, sat::Sat, COIN_VALUE, SUBSIDY_HALVING_INTERVAL};
/// A halving epoch: the index of a 210,000-block span whose blocks share a
/// single subsidy. Epoch 33 is the first with zero subsidy.
#[derive(Copy, Clone, Eq, PartialEq, Debug, PartialOrd)]
pub(crate) struct Epoch(pub(crate) u64);

impl Epoch {
    /// Ordinal of the first sat mined in each epoch, sorted ascending.
    /// The final entry is the total supply and acts as a sentinel upper
    /// bound for epoch 33.
    pub(crate) const STARTING_SATS: [Sat; 34] = [
        Sat(0),
        Sat(1050000000000000),
        Sat(1575000000000000),
        Sat(1837500000000000),
        Sat(1968750000000000),
        Sat(2034375000000000),
        Sat(2067187500000000),
        Sat(2083593750000000),
        Sat(2091796875000000),
        Sat(2095898437500000),
        Sat(2097949218750000),
        Sat(2098974609270000),
        Sat(2099487304530000),
        Sat(2099743652160000),
        Sat(2099871825870000),
        Sat(2099935912620000),
        Sat(2099967955890000),
        Sat(2099983977420000),
        Sat(2099991988080000),
        Sat(2099995993410000),
        Sat(2099997995970000),
        Sat(2099998997250000),
        Sat(2099999497890000),
        Sat(2099999748210000),
        Sat(2099999873370000),
        Sat(2099999935950000),
        Sat(2099999967240000),
        Sat(2099999982780000),
        Sat(2099999990550000),
        Sat(2099999994330000),
        Sat(2099999996220000),
        Sat(2099999997060000),
        Sat(2099999997480000),
        Sat(Sat::SUPPLY),
    ];

    /// First epoch whose block subsidy is zero.
    pub(crate) const FIRST_POST_SUBSIDY: Epoch = Self(33);

    /// Block subsidy for this epoch in sats: 50 BTC halved once per epoch,
    /// zero from epoch 33 onward.
    pub(crate) fn subsidy(self) -> u64 {
        if self < Self::FIRST_POST_SUBSIDY {
            (50 * COIN_VALUE) >> self.0
        } else {
            0
        }
    }

    /// Ordinal of the first sat mined in this epoch; epochs beyond the
    /// table clamp to the supply sentinel.
    pub(crate) fn starting_sat(self) -> Sat {
        *Self::STARTING_SATS
            .get(usize::try_from(self.0).unwrap())
            .unwrap_or_else(|| Self::STARTING_SATS.last().unwrap())
    }

    /// First block height of this epoch.
    pub(crate) fn starting_height(self) -> Height {
        Height(self.0 * SUBSIDY_HALVING_INTERVAL)
    }
}

impl PartialEq<u64> for Epoch {
    fn eq(&self, other: &u64) -> bool {
        self.0 == *other
    }
}
impl From<Sat> for Epoch {
    /// Finds the epoch a sat was mined in by binary-searching the sorted
    /// `STARTING_SATS` table, replacing the previous 33-arm if/else-if
    /// chain with an O(log n) `partition_point` lookup.
    fn from(sat: Sat) -> Self {
        // Number of epochs whose starting sat is <= `sat`. Because
        // STARTING_SATS[0] == Sat(0), this is always >= 1; any sat at or
        // beyond the supply sentinel counts all 34 entries, yielding
        // epoch 33 — the same result as the original chain's final `else`.
        let preceding = Self::STARTING_SATS.partition_point(|start| *start <= sat);
        Epoch(preceding as u64 - 1)
    }
}

impl From<Height> for Epoch {
    /// Epoch containing a block height: one epoch per halving interval.
    fn from(height: Height) -> Self {
        Self(height.0 / SUBSIDY_HALVING_INTERVAL)
    }
}
#[cfg(test)]
mod tests {
use crate::ord::{epoch::Epoch, height::Height, sat::Sat, SUBSIDY_HALVING_INTERVAL};
#[test]
fn starting_sat() {
assert_eq!(Epoch(0).starting_sat(), 0);
assert_eq!(
Epoch(1).starting_sat(),
Epoch(0).subsidy() * SUBSIDY_HALVING_INTERVAL
);
assert_eq!(
Epoch(2).starting_sat(),
(Epoch(0).subsidy() + Epoch(1).subsidy()) * SUBSIDY_HALVING_INTERVAL
);
assert_eq!(Epoch(33).starting_sat(), Sat(Sat::SUPPLY));
assert_eq!(Epoch(34).starting_sat(), Sat(Sat::SUPPLY));
}
#[test]
fn starting_sats() {
let mut sat = 0;
let mut epoch_sats = Vec::new();
for epoch in 0..34 {
epoch_sats.push(sat);
sat += SUBSIDY_HALVING_INTERVAL * Epoch(epoch).subsidy();
}
assert_eq!(Epoch::STARTING_SATS.as_slice(), epoch_sats);
assert_eq!(Epoch::STARTING_SATS.len(), 34);
}
#[test]
fn subsidy() {
assert_eq!(Epoch(0).subsidy(), 5000000000);
assert_eq!(Epoch(1).subsidy(), 2500000000);
assert_eq!(Epoch(32).subsidy(), 1);
assert_eq!(Epoch(33).subsidy(), 0);
}
#[test]
fn starting_height() {
assert_eq!(Epoch(0).starting_height(), 0);
assert_eq!(Epoch(1).starting_height(), SUBSIDY_HALVING_INTERVAL);
assert_eq!(Epoch(2).starting_height(), SUBSIDY_HALVING_INTERVAL * 2);
}
#[test]
fn from_height() {
assert_eq!(Epoch::from(Height(0)), 0);
assert_eq!(Epoch::from(Height(SUBSIDY_HALVING_INTERVAL)), 1);
assert_eq!(Epoch::from(Height(SUBSIDY_HALVING_INTERVAL) + 1), 1);
}
#[test]
fn from_sat() {
for (epoch, starting_sat) in Epoch::STARTING_SATS.into_iter().enumerate() {
if epoch > 0 {
assert_eq!(
Epoch::from(Sat(starting_sat.n() - 1)),
Epoch(epoch as u64 - 1)
);
}
assert_eq!(Epoch::from(starting_sat), Epoch(epoch as u64));
assert_eq!(Epoch::from(starting_sat + 1), Epoch(epoch as u64));
}
assert_eq!(Epoch::from(Sat(0)), 0);
assert_eq!(Epoch::from(Sat(1)), 0);
assert_eq!(Epoch::from(Epoch(1).starting_sat()), 1);
assert_eq!(Epoch::from(Epoch(1).starting_sat() + 1), 1);
assert_eq!(Epoch::from(Sat(u64::max_value())), 33);
}
#[test]
fn eq() {
assert_eq!(Epoch(0), 0);
assert_eq!(Epoch(100), 100);
}
#[test]
fn first_post_subsidy() {
assert_eq!(Epoch::FIRST_POST_SUBSIDY.subsidy(), 0);
assert!((Epoch(Epoch::FIRST_POST_SUBSIDY.0 - 1)).subsidy() > 0);
}
}

View File

@@ -1,50 +0,0 @@
use std::ops::{Add, Sub};
use super::{epoch::Epoch, sat::Sat, *};
// use std::fmt::Display;
/// A block height, with helpers for ordinal arithmetic.
#[derive(Copy, Clone, Debug, Ord, Eq, PartialEq, PartialOrd)]
pub struct Height(pub u64);

impl Height {
    /// Raw height value.
    pub fn n(self) -> u64 {
        self.0
    }

    /// Block subsidy paid at this height, derived from its halving epoch.
    pub fn subsidy(self) -> u64 {
        Epoch::from(self).subsidy()
    }

    /// Ordinal of the first sat mined in this height's coinbase: the
    /// epoch's starting sat plus one full subsidy per block mined since
    /// the epoch began.
    pub fn starting_sat(self) -> Sat {
        let epoch = Epoch::from(self);
        let epoch_starting_sat = epoch.starting_sat();
        let epoch_starting_height = epoch.starting_height();
        epoch_starting_sat + (self - epoch_starting_height.n()).n() * epoch.subsidy()
    }

    /// Offset of this height within its difficulty-adjustment period.
    pub fn period_offset(self) -> u64 {
        self.0 % DIFFCHANGE_INTERVAL
    }
}

impl Add<u64> for Height {
    type Output = Self;

    fn add(self, other: u64) -> Height {
        Self(self.0 + other)
    }
}

impl Sub<u64> for Height {
    type Output = Self;

    fn sub(self, other: u64) -> Height {
        Self(self.0 - other)
    }
}

impl PartialEq<u64> for Height {
    fn eq(&self, other: &u64) -> bool {
        self.0 == *other
    }
}

View File

@@ -1,660 +0,0 @@
use std::io::Cursor;
use chainhook_sdk::bitcoin::{hashes::Hash, Txid};
use super::{inscription_id::InscriptionId, media::Media};
use {
super::*,
chainhook_sdk::bitcoin::{
blockdata::{
opcodes,
script::{self, PushBytesBuf},
},
ScriptBuf,
},
std::str,
};
/// Parsed contents of an inscription envelope, one field per ord protocol
/// tag. `Vec<u8>` fields hold the raw tag payloads; the boolean flags
/// record envelope-level parse defects detected while decoding.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq, Default)]
pub struct Inscription {
    // Inscription payload (body tag).
    pub body: Option<Vec<u8>>,
    // Raw content-encoding tag payload (e.g. "br" for brotli).
    pub content_encoding: Option<Vec<u8>>,
    // Raw MIME content-type tag payload.
    pub content_type: Option<Vec<u8>>,
    // True if any tag appeared more than once in the envelope.
    pub duplicate_field: bool,
    // True if a tag was present without its value push.
    pub incomplete_field: bool,
    // Raw CBOR metadata tag payload.
    pub metadata: Option<Vec<u8>>,
    // Raw metaprotocol tag payload.
    pub metaprotocol: Option<Vec<u8>>,
    // Raw parent inscription id tag payload (txid + optional LE index).
    pub parent: Option<Vec<u8>>,
    // Raw pointer tag payload (little-endian u64).
    pub pointer: Option<Vec<u8>>,
    // True if an unrecognized even-numbered tag was seen.
    pub unrecognized_even_field: bool,
    // Raw delegate inscription id tag payload.
    pub delegate: Option<Vec<u8>>,
}
impl Inscription {
    #[cfg(test)]
    pub(crate) fn new(content_type: Option<Vec<u8>>, body: Option<Vec<u8>>) -> Self {
        Self {
            content_type,
            body,
            ..Default::default()
        }
    }

    /// Minimal little-endian encoding of a pointer: trailing zero bytes
    /// are stripped so the value round-trips through `pointer()`.
    pub(crate) fn pointer_value(pointer: u64) -> Vec<u8> {
        let mut bytes = pointer.to_le_bytes().to_vec();
        while bytes.last().copied() == Some(0) {
            bytes.pop();
        }
        bytes
    }

    /// Appends this inscription's reveal envelope
    /// (`OP_FALSE OP_IF "ord" <tags…> OP_ENDIF`) to `builder`. Metadata
    /// and body payloads are split into 520-byte pushes, the maximum
    /// standard push size.
    pub(crate) fn append_reveal_script_to_builder(
        &self,
        mut builder: script::Builder,
    ) -> script::Builder {
        builder = builder
            .push_opcode(opcodes::OP_FALSE)
            .push_opcode(opcodes::all::OP_IF)
            .push_slice(envelope::PROTOCOL_ID);

        if let Some(content_type) = self.content_type.clone() {
            builder = builder
                .push_slice(envelope::CONTENT_TYPE_TAG)
                .push_slice(PushBytesBuf::try_from(content_type).unwrap());
        }

        if let Some(content_encoding) = self.content_encoding.clone() {
            builder = builder
                .push_slice(envelope::CONTENT_ENCODING_TAG)
                .push_slice(PushBytesBuf::try_from(content_encoding).unwrap());
        }

        if let Some(protocol) = self.metaprotocol.clone() {
            builder = builder
                .push_slice(envelope::METAPROTOCOL_TAG)
                .push_slice(PushBytesBuf::try_from(protocol).unwrap());
        }

        if let Some(parent) = self.parent.clone() {
            builder = builder
                .push_slice(envelope::PARENT_TAG)
                .push_slice(PushBytesBuf::try_from(parent).unwrap());
        }

        if let Some(pointer) = self.pointer.clone() {
            builder = builder
                .push_slice(envelope::POINTER_TAG)
                .push_slice(PushBytesBuf::try_from(pointer).unwrap());
        }

        if let Some(metadata) = &self.metadata {
            for chunk in metadata.chunks(520) {
                builder = builder.push_slice(envelope::METADATA_TAG);
                builder = builder.push_slice(PushBytesBuf::try_from(chunk.to_vec()).unwrap());
            }
        }

        if let Some(body) = &self.body {
            builder = builder.push_slice(envelope::BODY_TAG);
            for chunk in body.chunks(520) {
                builder = builder.push_slice(PushBytesBuf::try_from(chunk.to_vec()).unwrap());
            }
        }

        builder.push_opcode(opcodes::all::OP_ENDIF)
    }

    #[cfg(test)]
    pub(crate) fn append_reveal_script(&self, builder: script::Builder) -> ScriptBuf {
        self.append_reveal_script_to_builder(builder).into_script()
    }

    /// Appends the reveal envelopes of several inscriptions, in order, to
    /// one script builder.
    pub(crate) fn append_batch_reveal_script_to_builder(
        inscriptions: &[Inscription],
        mut builder: script::Builder,
    ) -> script::Builder {
        for inscription in inscriptions {
            builder = inscription.append_reveal_script_to_builder(builder);
        }
        builder
    }

    pub(crate) fn append_batch_reveal_script(
        inscriptions: &[Inscription],
        builder: script::Builder,
    ) -> ScriptBuf {
        Inscription::append_batch_reveal_script_to_builder(inscriptions, builder).into_script()
    }

    /// Media category inferred from the content type; `Unknown` when the
    /// body is missing or the content type is absent/unrecognized.
    pub(crate) fn media(&self) -> Media {
        if self.body.is_none() {
            return Media::Unknown;
        }

        let Some(content_type) = self.content_type() else {
            return Media::Unknown;
        };

        content_type.parse().unwrap_or(Media::Unknown)
    }

    pub(crate) fn body(&self) -> Option<&[u8]> {
        Some(self.body.as_ref()?)
    }

    pub(crate) fn into_body(self) -> Option<Vec<u8>> {
        self.body
    }

    pub(crate) fn content_length(&self) -> Option<usize> {
        Some(self.body()?.len())
    }

    /// Content type as UTF-8, or `None` if absent or not valid UTF-8.
    pub(crate) fn content_type(&self) -> Option<&str> {
        str::from_utf8(self.content_type.as_ref()?).ok()
    }

    pub(crate) fn metaprotocol(&self) -> Option<&str> {
        str::from_utf8(self.metaprotocol.as_ref()?).ok()
    }

    /// Decodes a tag payload as an inscription id: a 32-byte txid followed
    /// by an optional little-endian index of up to four bytes. A fixed
    /// four-byte index may carry trailing zeroes; shorter variable-length
    /// encodings must not.
    fn inscription_id_field(field: &Option<Vec<u8>>) -> Option<InscriptionId> {
        let value = field.as_ref()?;

        if value.len() < Txid::LEN {
            return None;
        }

        if value.len() > Txid::LEN + 4 {
            return None;
        }

        let (txid, index) = value.split_at(Txid::LEN);

        if let Some(last) = index.last() {
            // Accept fixed length encoding with 4 bytes (with potential trailing zeroes)
            // or variable length (no trailing zeroes)
            if index.len() != 4 && *last == 0 {
                return None;
            }
        }

        let txid = Txid::from_slice(txid).unwrap();

        let index = [
            index.first().copied().unwrap_or(0),
            index.get(1).copied().unwrap_or(0),
            index.get(2).copied().unwrap_or(0),
            index.get(3).copied().unwrap_or(0),
        ];

        let index = u32::from_le_bytes(index);

        Some(InscriptionId { txid, index })
    }

    pub(crate) fn delegate(&self) -> Option<InscriptionId> {
        Self::inscription_id_field(&self.delegate)
    }

    /// CBOR-decoded metadata, or `None` if absent or malformed.
    pub(crate) fn metadata(&self) -> Option<ciborium::Value> {
        ciborium::from_reader(Cursor::new(self.metadata.as_ref()?)).ok()
    }

    /// Parent inscription id, if any. Fix: delegate to the shared
    /// `inscription_id_field` parser instead of duplicating its logic
    /// inline behind a confusing `use … Txid as TXID_LEN` alias — the
    /// decoding rules are byte-for-byte the same as `delegate()`.
    pub(crate) fn parent(&self) -> Option<InscriptionId> {
        Self::inscription_id_field(&self.parent)
    }

    /// Pointer tag decoded as a little-endian u64; `None` if any byte past
    /// the eighth is non-zero (the value would not fit in a u64).
    pub(crate) fn pointer(&self) -> Option<u64> {
        let value = self.pointer.as_ref()?;

        if value.iter().skip(8).copied().any(|byte| byte != 0) {
            return None;
        }

        // Missing high bytes default to zero; assemble at most 8 bytes.
        let mut bytes = [0u8; 8];
        let len = value.len().min(8);
        bytes[..len].copy_from_slice(&value[..len]);

        Some(u64::from_le_bytes(bytes))
    }

    #[cfg(test)]
    pub(crate) fn to_witness(&self) -> chainhook_sdk::bitcoin::Witness {
        let builder = script::Builder::new();
        let script = self.append_reveal_script(builder);
        let mut witness = chainhook_sdk::bitcoin::Witness::new();
        witness.push(script);
        witness.push([]);
        witness
    }
}
#[cfg(test)]
mod tests {
use chainhook_sdk::bitcoin::Witness;
use super::*;
fn inscription(content_type: &str, body: impl AsRef<[u8]>) -> Inscription {
Inscription::new(Some(content_type.into()), Some(body.as_ref().into()))
}
fn envelope(payload: &[&[u8]]) -> Witness {
let mut builder = script::Builder::new()
.push_opcode(opcodes::OP_FALSE)
.push_opcode(opcodes::all::OP_IF);
for data in payload {
let mut buf = PushBytesBuf::new();
buf.extend_from_slice(data).unwrap();
builder = builder.push_slice(buf);
}
let script = builder.push_opcode(opcodes::all::OP_ENDIF).into_script();
Witness::from_slice(&[script.into_bytes(), Vec::new()])
}
#[test]
fn reveal_script_chunks_body() {
assert_eq!(
inscription("foo", [])
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
7
);
assert_eq!(
inscription("foo", [0; 1])
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
8
);
assert_eq!(
inscription("foo", [0; 520])
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
8
);
assert_eq!(
inscription("foo", [0; 521])
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
9
);
assert_eq!(
inscription("foo", [0; 1040])
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
9
);
assert_eq!(
inscription("foo", [0; 1041])
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
10
);
}
#[test]
fn reveal_script_chunks_metadata() {
assert_eq!(
Inscription {
metadata: None,
..Default::default()
}
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
4
);
assert_eq!(
Inscription {
metadata: Some(Vec::new()),
..Default::default()
}
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
4
);
assert_eq!(
Inscription {
metadata: Some(vec![0; 1]),
..Default::default()
}
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
6
);
assert_eq!(
Inscription {
metadata: Some(vec![0; 520]),
..Default::default()
}
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
6
);
assert_eq!(
Inscription {
metadata: Some(vec![0; 521]),
..Default::default()
}
.append_reveal_script(script::Builder::new())
.instructions()
.count(),
8
);
}
#[test]
fn inscription_with_no_parent_field_has_no_parent() {
assert!(Inscription {
parent: None,
..Default::default()
}
.parent()
.is_none());
}
#[test]
fn inscription_with_parent_field_shorter_than_txid_length_has_no_parent() {
assert!(Inscription {
parent: Some(vec![]),
..Default::default()
}
.parent()
.is_none());
}
#[test]
fn inscription_with_parent_field_longer_than_txid_and_index_has_no_parent() {
assert!(Inscription {
parent: Some(vec![1; 37]),
..Default::default()
}
.parent()
.is_none());
}
#[test]
fn inscription_with_parent_field_index_with_trailing_zeroes_and_fixed_length_has_parent() {
let mut parent = vec![1; 36];
parent[35] = 0;
assert!(Inscription {
parent: Some(parent),
..Default::default()
}
.parent()
.is_some());
}
#[test]
fn inscription_with_parent_field_index_with_trailing_zeroes_and_variable_length_has_no_parent()
{
let mut parent = vec![1; 35];
parent[34] = 0;
assert!(Inscription {
parent: Some(parent),
..Default::default()
}
.parent()
.is_none());
}
#[test]
fn inscription_parent_txid_is_deserialized_correctly() {
assert_eq!(
Inscription {
parent: Some(vec![
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c,
0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19,
0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
]),
..Default::default()
}
.parent()
.unwrap()
.txid,
"1f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100"
.parse()
.unwrap()
);
}
#[test]
fn inscription_parent_with_zero_byte_index_field_is_deserialized_correctly() {
assert_eq!(
Inscription {
parent: Some(vec![1; 32]),
..Default::default()
}
.parent()
.unwrap()
.index,
0
);
}
#[test]
fn inscription_parent_with_one_byte_index_field_is_deserialized_correctly() {
assert_eq!(
Inscription {
parent: Some(vec![
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01
]),
..Default::default()
}
.parent()
.unwrap()
.index,
1
);
}
#[test]
fn inscription_parent_with_two_byte_index_field_is_deserialized_correctly() {
assert_eq!(
Inscription {
parent: Some(vec![
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x02
]),
..Default::default()
}
.parent()
.unwrap()
.index,
0x0201,
);
}
#[test]
fn inscription_parent_with_three_byte_index_field_is_deserialized_correctly() {
assert_eq!(
Inscription {
parent: Some(vec![
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x02, 0x03
]),
..Default::default()
}
.parent()
.unwrap()
.index,
0x030201,
);
}
#[test]
fn inscription_parent_with_four_byte_index_field_is_deserialized_correctly() {
assert_eq!(
Inscription {
parent: Some(vec![
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01, 0x02, 0x03, 0x04,
]),
..Default::default()
}
.parent()
.unwrap()
.index,
0x04030201,
);
}
#[test]
fn pointer_decode() {
assert_eq!(
Inscription {
pointer: None,
..Default::default()
}
.pointer(),
None
);
assert_eq!(
Inscription {
pointer: Some(vec![0]),
..Default::default()
}
.pointer(),
Some(0),
);
assert_eq!(
Inscription {
pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8]),
..Default::default()
}
.pointer(),
Some(0x0807060504030201),
);
assert_eq!(
Inscription {
pointer: Some(vec![1, 2, 3, 4, 5, 6]),
..Default::default()
}
.pointer(),
Some(0x0000060504030201),
);
assert_eq!(
Inscription {
pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 0]),
..Default::default()
}
.pointer(),
Some(0x0807060504030201),
);
assert_eq!(
Inscription {
pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 1]),
..Default::default()
}
.pointer(),
None,
);
assert_eq!(
Inscription {
pointer: Some(vec![1, 2, 3, 4, 5, 6, 7, 8, 1]),
..Default::default()
}
.pointer(),
None,
);
}
#[test]
fn pointer_encode() {
assert_eq!(
Inscription {
pointer: None,
..Default::default()
}
.to_witness(),
envelope(&[b"ord"]),
);
assert_eq!(
Inscription {
pointer: Some(vec![1, 2, 3]),
..Default::default()
}
.to_witness(),
envelope(&[b"ord", &[2], &[1, 2, 3]]),
);
}
}

View File

@@ -1,231 +0,0 @@
use std::{
fmt::{Display, Formatter},
str::FromStr,
};
use chainhook_sdk::bitcoin::Txid;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use super::deserialize_from_str::DeserializeFromStr;
/// An inscription's identifier: the reveal transaction's txid plus the
/// zero-based index of the inscription within that transaction. Rendered
/// and parsed as `<txid>i<index>`.
#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq)]
pub struct InscriptionId {
    pub(crate) txid: Txid,
    pub(crate) index: u32,
}

impl<'de> Deserialize<'de> for InscriptionId {
    // Deserialized from its string form via the FromStr impl below.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(DeserializeFromStr::deserialize(deserializer)?.0)
    }
}

impl Serialize for InscriptionId {
    // Serialized as the canonical `<txid>i<index>` string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_str(self)
    }
}

impl Display for InscriptionId {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(f, "{}i{}", self.txid, self.index)
    }
}
/// Reasons an inscription id string fails to parse, one variant per stage
/// of validation in `InscriptionId::from_str`.
#[derive(Debug)]
pub enum ParseError {
    // A non-ASCII character anywhere in the input.
    Character(char),
    // Input shorter than a txid plus separator and index.
    Length(usize),
    // Character at the separator position was not 'i'.
    Separator(char),
    // The txid portion is not valid hex.
    Txid(chainhook_sdk::bitcoin::hashes::hex::HexToArrayError),
    // The index portion is not a valid u32.
    Index(std::num::ParseIntError),
}

impl Display for ParseError {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        match self {
            Self::Character(c) => write!(f, "invalid character: '{c}'"),
            Self::Length(len) => write!(f, "invalid length: {len}"),
            Self::Separator(c) => write!(f, "invalid seprator: `{c}`"),
            Self::Txid(err) => write!(f, "invalid txid: {err}"),
            Self::Index(err) => write!(f, "invalid index: {err}"),
        }
    }
}

impl std::error::Error for ParseError {}
impl FromStr for InscriptionId {
    type Err = ParseError;

    /// Parses the canonical `<txid>i<index>` form.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Reject non-ASCII up front so the byte-offset slicing below is
        // guaranteed to land on char boundaries.
        if let Some(char) = s.chars().find(|char| !char.is_ascii()) {
            return Err(ParseError::Character(char));
        }

        // 64 hex chars of txid, then 'i', then at least one index digit.
        const TXID_LEN: usize = 64;
        const MIN_LEN: usize = TXID_LEN + 2;

        if s.len() < MIN_LEN {
            return Err(ParseError::Length(s.len()));
        }

        let txid = &s[..TXID_LEN];

        let separator = s.chars().nth(TXID_LEN).unwrap();

        if separator != 'i' {
            return Err(ParseError::Separator(separator));
        }

        let vout = &s[TXID_LEN + 1..];

        Ok(Self {
            txid: txid.parse().map_err(ParseError::Txid)?,
            index: vout.parse().map_err(ParseError::Index)?,
        })
    }
}
impl From<Txid> for InscriptionId {
    /// Shorthand for the first inscription in a transaction (index 0).
    fn from(txid: Txid) -> Self {
        Self { txid, index: 0 }
    }
}
#[cfg(test)]
mod tests {
macro_rules! assert_matches {
($expression:expr, $( $pattern:pat_param )|+ $( if $guard:expr )? $(,)?) => {
match $expression {
$( $pattern )|+ $( if $guard )? => {}
left => panic!(
"assertion failed: (left ~= right)\n left: `{:?}`\n right: `{}`",
left,
stringify!($($pattern)|+ $(if $guard)?)
),
}
}
}
use super::*;
fn txid(n: u64) -> Txid {
let hex = format!("{n:x}");
if hex.is_empty() || hex.len() > 1 {
panic!();
}
hex.repeat(64).parse().unwrap()
}
pub(crate) fn inscription_id(n: u32) -> InscriptionId {
let hex = format!("{n:x}");
if hex.is_empty() || hex.len() > 1 {
panic!();
}
format!("{}i{n}", hex.repeat(64)).parse().unwrap()
}
#[test]
fn display() {
assert_eq!(
inscription_id(1).to_string(),
"1111111111111111111111111111111111111111111111111111111111111111i1",
);
assert_eq!(
InscriptionId {
txid: txid(1),
index: 0,
}
.to_string(),
"1111111111111111111111111111111111111111111111111111111111111111i0",
);
assert_eq!(
InscriptionId {
txid: txid(1),
index: 0xFFFFFFFF,
}
.to_string(),
"1111111111111111111111111111111111111111111111111111111111111111i4294967295",
);
}
#[test]
fn from_str() {
assert_eq!(
"1111111111111111111111111111111111111111111111111111111111111111i1"
.parse::<InscriptionId>()
.unwrap(),
inscription_id(1),
);
assert_eq!(
"1111111111111111111111111111111111111111111111111111111111111111i4294967295"
.parse::<InscriptionId>()
.unwrap(),
InscriptionId {
txid: txid(1),
index: 0xFFFFFFFF,
},
);
assert_eq!(
"1111111111111111111111111111111111111111111111111111111111111111i4294967295"
.parse::<InscriptionId>()
.unwrap(),
InscriptionId {
txid: txid(1),
index: 0xFFFFFFFF,
},
);
}
#[test]
fn from_str_bad_character() {
assert_matches!(
"".parse::<InscriptionId>(),
Err(ParseError::Character('→')),
);
}
#[test]
fn from_str_bad_length() {
assert_matches!("foo".parse::<InscriptionId>(), Err(ParseError::Length(3)));
}
#[test]
fn from_str_bad_separator() {
assert_matches!(
"0000000000000000000000000000000000000000000000000000000000000000x0"
.parse::<InscriptionId>(),
Err(ParseError::Separator('x')),
);
}
#[test]
fn from_str_bad_index() {
assert_matches!(
"0000000000000000000000000000000000000000000000000000000000000000ifoo"
.parse::<InscriptionId>(),
Err(ParseError::Index(_)),
);
}
#[test]
fn from_str_bad_txid() {
assert_matches!(
"x000000000000000000000000000000000000000000000000000000000000000i0"
.parse::<InscriptionId>(),
Err(ParseError::Txid(_)),
);
}
}

View File

@@ -1,102 +0,0 @@
use std::{
fmt::{Display, Formatter},
str::FromStr,
};
use anyhow::{anyhow, Error};
/// Broad media category of an inscription, used to decide how content is
/// classified and rendered.
#[derive(Debug, PartialEq, Copy, Clone)]
pub(crate) enum Media {
    Audio,
    Code(Language),
    Font,
    Iframe,
    Image,
    Markdown,
    Model,
    Pdf,
    Text,
    Unknown,
    Video,
}

/// Programming/markup language of a `Media::Code` inscription.
#[derive(Debug, PartialEq, Copy, Clone)]
pub(crate) enum Language {
    Css,
    JavaScript,
    Json,
    Python,
    Yaml,
}

impl Display for Language {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Self::Css => "css",
                Self::JavaScript => "javascript",
                Self::Json => "json",
                Self::Python => "python",
                Self::Yaml => "yaml",
            }
        )
    }
}
impl Media {
    /// Known content types: `(content-type string, media category,
    /// associated file extensions)`. Exact-match lookup only — parameters
    /// like charset must match byte-for-byte (hence the separate
    /// `;charset=utf-8` rows).
    #[rustfmt::skip]
    const TABLE: &'static [(&'static str, Media, &'static [&'static str])] = &[
        ("application/cbor", Media::Unknown, &["cbor"]),
        ("application/json", Media::Code(Language::Json), &["json"]),
        ("application/octet-stream", Media::Unknown, &["bin"]),
        ("application/pdf", Media::Pdf, &["pdf"]),
        ("application/pgp-signature", Media::Text, &["asc"]),
        ("application/protobuf", Media::Unknown, &["binpb"]),
        ("application/x-javascript", Media::Code(Language::JavaScript), &[]),
        ("application/yaml", Media::Code(Language::Yaml), &["yaml", "yml"]),
        ("audio/flac", Media::Audio, &["flac"]),
        ("audio/mpeg", Media::Audio, &["mp3"]),
        ("audio/wav", Media::Audio, &["wav"]),
        ("font/otf", Media::Font, &["otf"]),
        ("font/ttf", Media::Font, &["ttf"]),
        ("font/woff", Media::Font, &["woff"]),
        ("font/woff2", Media::Font, &["woff2"]),
        ("image/apng", Media::Image, &["apng"]),
        ("image/avif", Media::Image, &[]),
        ("image/gif", Media::Image, &["gif"]),
        ("image/jpeg", Media::Image, &["jpg", "jpeg"]),
        ("image/png", Media::Image, &["png"]),
        ("image/svg+xml", Media::Iframe, &["svg"]),
        ("image/webp", Media::Image, &["webp"]),
        ("model/gltf+json", Media::Model, &["gltf"]),
        ("model/gltf-binary", Media::Model, &["glb"]),
        ("model/stl", Media::Unknown, &["stl"]),
        ("text/css", Media::Code(Language::Css), &["css"]),
        ("text/html", Media::Iframe, &[]),
        ("text/html;charset=utf-8", Media::Iframe, &["html"]),
        ("text/javascript", Media::Code(Language::JavaScript), &["js"]),
        ("text/markdown", Media::Markdown, &[]),
        ("text/markdown;charset=utf-8", Media::Markdown, &["md"]),
        ("text/plain", Media::Text, &[]),
        ("text/plain;charset=utf-8", Media::Text, &["txt"]),
        ("text/x-python", Media::Code(Language::Python), &["py"]),
        ("video/mp4", Media::Video, &["mp4"]),
        ("video/webm", Media::Video, &["webm"]),
    ];
}
impl FromStr for Media {
    type Err = Error;

    /// Looks up a content-type string in `TABLE` (exact match). Uses
    /// `Iterator::find` instead of the previous manual loop; errors for
    /// unknown content types.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::TABLE
            .iter()
            .find(|(content_type, _, _)| *content_type == s)
            .map(|(_, media, _)| *media)
            .ok_or_else(|| anyhow!("unknown content type: {s}"))
    }
}

View File

@@ -1,26 +0,0 @@
#![allow(dead_code)]
#![allow(unused_variables)]

// Vendored subset of the `ord` crate: ordinal math (sats, epochs, heights,
// degrees, rarity) and inscription envelope parsing.

/// Module-local result alias defaulting to `anyhow::Error`.
type Result<T = (), E = anyhow::Error> = std::result::Result<T, E>;

use chainhook_sdk::types::BitcoinNetwork;

pub mod chain;
pub mod deserialize_from_str;
pub mod envelope;
pub mod epoch;
pub mod height;
pub mod inscription;
pub mod inscription_id;
pub mod media;
pub mod sat;
pub mod sat_point;
pub mod degree;
pub mod rarity;

// Blocks per difficulty retarget, widened to u64 for ordinal math.
const DIFFCHANGE_INTERVAL: u64 =
    chainhook_sdk::bitcoincore_rpc::bitcoin::blockdata::constants::DIFFCHANGE_INTERVAL as u64;
// Blocks per subsidy halving, widened to u64 for ordinal math.
const SUBSIDY_HALVING_INTERVAL: u64 =
    chainhook_sdk::bitcoincore_rpc::bitcoin::blockdata::constants::SUBSIDY_HALVING_INTERVAL as u64;
// Difficulty periods per ordinal "cycle".
const CYCLE_EPOCHS: u64 = 6;

/// Satoshis per bitcoin.
pub const COIN_VALUE: u64 = 100_000_000;

View File

@@ -1,184 +0,0 @@
use std::fmt::{Display, Formatter};
use degree::Degree;
use sat::Sat;
use super::*;
/// Ordinal rarity tier of a sat, ordered from most to least common.
#[derive(Debug, PartialEq, PartialOrd, Copy, Clone)]
pub enum Rarity {
    Common,
    Uncommon,
    Rare,
    Epic,
    Legendary,
    Mythic,
}
impl From<Rarity> for u8 {
    /// Numeric code of the rarity tier (0 = common … 5 = mythic), from the
    /// enum's declaration order.
    fn from(rarity: Rarity) -> Self {
        rarity as u8
    }
}
// impl TryFrom<u8> for Rarity {
// type Error = u8;
// fn try_from(rarity: u8) -> Result<Self, u8> {
// match rarity {
// 0 => Ok(Self::Common),
// 1 => Ok(Self::Uncommon),
// 2 => Ok(Self::Rare),
// 3 => Ok(Self::Epic),
// 4 => Ok(Self::Legendary),
// 5 => Ok(Self::Mythic),
// n => Err(n),
// }
// }
// }
impl Display for Rarity {
    /// Renders the tier's lowercase name.
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        let label = match self {
            Self::Common => "common",
            Self::Uncommon => "uncommon",
            Self::Rare => "rare",
            Self::Epic => "epic",
            Self::Legendary => "legendary",
            Self::Mythic => "mythic",
        };
        f.write_str(label)
    }
}
impl From<Sat> for Rarity {
    /// Derives rarity from which components of the sat's degree notation
    /// are zero — presumably hour/minute/second/third correspond to cycle,
    /// epoch offset, period offset, and block offset per ordinal theory
    /// (defined in `sat.degree()`, elsewhere) — TODO confirm. All four
    /// zero is mythic; progressively fewer zero components yield
    /// legendary, epic, rare, uncommon; anything mid-block is common.
    fn from(sat: Sat) -> Self {
        let Degree {
            hour,
            minute,
            second,
            third,
        } = sat.degree();
        if hour == 0 && minute == 0 && second == 0 && third == 0 {
            Self::Mythic
        } else if minute == 0 && second == 0 && third == 0 {
            Self::Legendary
        } else if minute == 0 && third == 0 {
            Self::Epic
        } else if second == 0 && third == 0 {
            Self::Rare
        } else if third == 0 {
            Self::Uncommon
        } else {
            Self::Common
        }
    }
}
// impl FromStr for Rarity {
// type Err = String;
// fn from_str(s: &str) -> Result<Self, Self::Err> {
// match s {
// "common" => Ok(Self::Common),
// "uncommon" => Ok(Self::Uncommon),
// "rare" => Ok(Self::Rare),
// "epic" => Ok(Self::Epic),
// "legendary" => Ok(Self::Legendary),
// "mythic" => Ok(Self::Mythic),
// _ => Err(format!("invalid rarity `{s}`")),
// }
// }
// }
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn rarity() {
// assert_eq!(Sat(0).rarity(), Rarity::Mythic);
// assert_eq!(Sat(1).rarity(), Rarity::Common);
// assert_eq!(Sat(50 * COIN_VALUE - 1).rarity(), Rarity::Common);
// assert_eq!(Sat(50 * COIN_VALUE).rarity(), Rarity::Uncommon);
// assert_eq!(Sat(50 * COIN_VALUE + 1).rarity(), Rarity::Common);
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL) - 1).rarity(),
// Rarity::Common
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL)).rarity(),
// Rarity::Rare
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(DIFFCHANGE_INTERVAL) + 1).rarity(),
// Rarity::Common
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL) - 1).rarity(),
// Rarity::Common
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL)).rarity(),
// Rarity::Epic
// );
// assert_eq!(
// Sat(50 * COIN_VALUE * u64::from(SUBSIDY_HALVING_INTERVAL) + 1).rarity(),
// Rarity::Common
// );
// assert_eq!(Sat(2067187500000000 - 1).rarity(), Rarity::Common);
// assert_eq!(Sat(2067187500000000).rarity(), Rarity::Legendary);
// assert_eq!(Sat(2067187500000000 + 1).rarity(), Rarity::Common);
// }
// #[test]
// fn from_str_and_deserialize_ok() {
// #[track_caller]
// fn case(s: &str, expected: Rarity) {
// let actual = s.parse::<Rarity>().unwrap();
// assert_eq!(actual, expected);
// let round_trip = actual.to_string().parse::<Rarity>().unwrap();
// assert_eq!(round_trip, expected);
// let serialized = serde_json::to_string(&expected).unwrap();
// assert!(serde_json::from_str::<Rarity>(&serialized).is_ok());
// }
// case("common", Rarity::Common);
// case("uncommon", Rarity::Uncommon);
// case("rare", Rarity::Rare);
// case("epic", Rarity::Epic);
// case("legendary", Rarity::Legendary);
// case("mythic", Rarity::Mythic);
// }
// #[test]
// fn conversions_with_u8() {
// for &expected in &[
// Rarity::Common,
// Rarity::Uncommon,
// Rarity::Rare,
// Rarity::Epic,
// Rarity::Legendary,
// Rarity::Mythic,
// ] {
// let n: u8 = expected.into();
// let actual = Rarity::try_from(n).unwrap();
// assert_eq!(actual, expected);
// }
// assert_eq!(Rarity::try_from(6), Err(6));
// }
// #[test]
// fn error() {
// assert_eq!("foo".parse::<Rarity>().unwrap_err(), "invalid rarity `foo`");
// }
// }

View File

@@ -1,214 +0,0 @@
use std::ops::{Add, AddAssign};
use degree::Degree;
use super::{epoch::Epoch, height::Height, *};
/// The ordinal number of a single satoshi, counted in the order sats are
/// mined (sat 0 is the first sat of the genesis block).
#[derive(Copy, Clone, Eq, PartialEq, Debug, Ord, PartialOrd, Deserialize, Serialize)]
#[serde(transparent)]
pub struct Sat(pub u64);
impl Sat {
    /// The last sat that will ever be mined.
    pub(crate) const LAST: Self = Self(Self::SUPPLY - 1);
    /// Total number of sats that will ever exist (sum of all block
    /// subsidies; verified by the brute-force `supply` test below).
    pub(crate) const SUPPLY: u64 = 2099999997690000;
    /// Degree notation of this sat — see `Degree` for component meaning.
    pub fn degree(self) -> Degree {
        self.into()
    }
    /// Raw ordinal number.
    pub(crate) fn n(self) -> u64 {
        self.0
    }
    /// Height of the block in which this sat was mined: the epoch's starting
    /// height plus how many full subsidies precede it within the epoch.
    pub(crate) fn height(self) -> Height {
        self.epoch().starting_height() + self.epoch_position() / self.epoch().subsidy()
    }
    /// Cycle index; a cycle spans `CYCLE_EPOCHS` halving epochs.
    pub(crate) fn cycle(self) -> u64 {
        Epoch::from(self).0 / CYCLE_EPOCHS
    }
    /// Position in the total supply as a percentage of `Sat::LAST`,
    /// formatted with a trailing `%` (e.g. `"100%"`).
    pub(crate) fn percentile(self) -> String {
        format!("{}%", (self.0 as f64 / Self::LAST.0 as f64) * 100.0)
    }
    /// Halving epoch in which this sat was mined.
    pub(crate) fn epoch(self) -> Epoch {
        self.into()
    }
    /// Offset of this sat within its block's subsidy (0 for the first sat of
    /// a block).
    pub(crate) fn third(self) -> u64 {
        self.epoch_position() % self.epoch().subsidy()
    }
    /// Offset of this sat from the first sat of its epoch.
    pub(crate) fn epoch_position(self) -> u64 {
        self.0 - self.epoch().starting_sat().0
    }
    /// `Sat::rarity` is expensive and is called frequently when indexing.
    /// Sat::is_common only checks if self is `Rarity::Common` but is
    /// much faster.
    pub(crate) fn is_common(self) -> bool {
        let epoch = self.epoch();
        // A sat is non-common exactly when it is the first sat of a block,
        // i.e. its offset into the epoch is a multiple of the block subsidy.
        (self.0 - epoch.starting_sat().0) % epoch.subsidy() != 0
    }
    /// Name of the sat: a bijective base-26 ("a".."z") encoding of the
    /// number of sats remaining after it, so names shorten as supply runs
    /// out and the final sat is named "a".
    pub(crate) fn name(self) -> String {
        let mut x = Self::SUPPLY - self.0;
        let mut name = String::new();
        while x > 0 {
            // (x - 1) % 26 picks the letter; the -1 makes the encoding
            // bijective (no zero digit), mirroring spreadsheet column names.
            name.push(
                "abcdefghijklmnopqrstuvwxyz"
                    .chars()
                    .nth(((x - 1) % 26) as usize)
                    .unwrap(),
            );
            x = (x - 1) / 26;
        }
        // Digits were produced least-significant first; reverse into place.
        name.chars().rev().collect()
    }
}
impl PartialEq<u64> for Sat {
    /// Allows comparing a `Sat` directly against a raw ordinal number.
    fn eq(&self, other: &u64) -> bool {
        *other == self.0
    }
}
impl PartialOrd<u64> for Sat {
    /// Orders a `Sat` against a raw ordinal number. `u64` ordering is total,
    /// so this always returns `Some`.
    fn partial_cmp(&self, other: &u64) -> Option<std::cmp::Ordering> {
        Some(self.0.cmp(other))
    }
}
impl Add<u64> for Sat {
type Output = Self;
fn add(self, other: u64) -> Sat {
Sat(self.0 + other)
}
}
impl AddAssign<u64> for Sat {
fn add_assign(&mut self, other: u64) {
*self = Sat(self.0 + other);
}
}
// Unit tests for `Sat`: naming, epoch/subsidy arithmetic, supply constants,
// and the operator impls above. Expected values are fixed points of Bitcoin's
// issuance schedule and must not change.
#[cfg(test)]
mod tests {
    use super::COIN_VALUE;
    use super::*;
    #[test]
    fn n() {
        assert_eq!(Sat(1).n(), 1);
        assert_eq!(Sat(100).n(), 100);
    }
    // Names count down in bijective base 26: sat 0 has the longest name and
    // the final sat is "a".
    #[test]
    fn name() {
        assert_eq!(Sat(0).name(), "nvtdijuwxlp");
        assert_eq!(Sat(1).name(), "nvtdijuwxlo");
        assert_eq!(Sat(26).name(), "nvtdijuwxkp");
        assert_eq!(Sat(27).name(), "nvtdijuwxko");
        assert_eq!(Sat(2099999997689999).name(), "a");
        assert_eq!(Sat(2099999997689999 - 1).name(), "b");
        assert_eq!(Sat(2099999997689999 - 25).name(), "z");
        assert_eq!(Sat(2099999997689999 - 26).name(), "aa");
    }
    #[test]
    fn number() {
        assert_eq!(Sat(2099999997689999).n(), 2099999997689999);
    }
    // epoch_position restarts at 0 on every epoch boundary.
    #[test]
    fn epoch_position() {
        assert_eq!(Epoch(0).starting_sat().epoch_position(), 0);
        assert_eq!((Epoch(0).starting_sat() + 100).epoch_position(), 100);
        assert_eq!(Epoch(1).starting_sat().epoch_position(), 0);
        assert_eq!(Epoch(2).starting_sat().epoch_position(), 0);
    }
    // third (= offset within a block's subsidy) wraps at each block boundary.
    #[test]
    fn subsidy_position() {
        assert_eq!(Sat(0).third(), 0);
        assert_eq!(Sat(1).third(), 1);
        assert_eq!(
            Sat(Height(0).subsidy() - 1).third(),
            Height(0).subsidy() - 1
        );
        assert_eq!(Sat(Height(0).subsidy()).third(), 0);
        assert_eq!(Sat(Height(0).subsidy() + 1).third(), 1);
        assert_eq!(
            Sat(Epoch(1).starting_sat().n() + Epoch(1).subsidy()).third(),
            0
        );
        assert_eq!(Sat::LAST.third(), 0);
    }
    // Brute-force sums every block subsidy until issuance ends to confirm
    // the SUPPLY constant.
    #[test]
    fn supply() {
        let mut mined = 0;
        for height in 0.. {
            let subsidy = Height(height).subsidy();
            if subsidy == 0 {
                break;
            }
            mined += subsidy;
        }
        assert_eq!(Sat::SUPPLY, mined);
    }
    #[test]
    fn last() {
        assert_eq!(Sat::LAST, Sat::SUPPLY - 1);
    }
    // Exercises PartialEq<u64> (Sat compared directly against raw numbers).
    #[test]
    fn eq() {
        assert_eq!(Sat(0), 0);
        assert_eq!(Sat(1), 1);
    }
    #[test]
    fn partial_ord() {
        assert!(Sat(1) > 0);
        assert!(Sat(0) < 1);
    }
    #[test]
    fn add() {
        assert_eq!(Sat(0) + 1, 1);
        assert_eq!(Sat(1) + 100, 101);
    }
    #[test]
    fn add_assign() {
        let mut sat = Sat(0);
        sat += 1;
        assert_eq!(sat, 1);
        sat += 100;
        assert_eq!(sat, 101);
    }
    #[test]
    fn third() {
        assert_eq!(Sat(0).third(), 0);
        assert_eq!(Sat(50 * COIN_VALUE - 1).third(), 4999999999);
        assert_eq!(Sat(50 * COIN_VALUE).third(), 0);
        assert_eq!(Sat(50 * COIN_VALUE + 1).third(), 1);
    }
    // percentile formats via f64, hence the long fractional midpoint value.
    #[test]
    fn percentile() {
        assert_eq!(Sat(0).percentile(), "0%");
        assert_eq!(Sat(Sat::LAST.n() / 2).percentile(), "49.99999999999998%");
        assert_eq!(Sat::LAST.percentile(), "100%");
    }
}

View File

@@ -1,128 +0,0 @@
use std::{
fmt::{Display, Formatter},
io,
str::FromStr,
};
use chainhook_sdk::bitcoincore_rpc::bitcoin::{
self,
consensus::{Decodable, Encodable},
OutPoint,
};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use super::{deserialize_from_str::DeserializeFromStr, Result};
/// The location of a satoshi on the blockchain: a transaction output plus an
/// offset within that output. Rendered and parsed as `txid:vout:offset`.
#[derive(Debug, PartialEq, Copy, Clone, Eq, PartialOrd, Ord)]
pub struct SatPoint {
    /// The transaction output containing the sat.
    pub(crate) outpoint: OutPoint,
    /// Zero-based offset, in sats, of the sat within that output's value.
    pub(crate) offset: u64,
}
impl Display for SatPoint {
    /// Renders the satpoint as `txid:vout:offset` — the outpoint's own
    /// display form followed by a colon and the offset.
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(f, "{}", self.outpoint)?;
        write!(f, ":{}", self.offset)
    }
}
impl Encodable for SatPoint {
    /// Consensus-encodes the outpoint followed by the offset, returning the
    /// total number of bytes written.
    fn consensus_encode<S: io::Write + ?Sized>(&self, s: &mut S) -> Result<usize, io::Error> {
        let mut written = 0;
        written += self.outpoint.consensus_encode(s)?;
        written += self.offset.consensus_encode(s)?;
        Ok(written)
    }
}
impl Decodable for SatPoint {
    /// Decodes the outpoint, then the offset — the same order in which
    /// `consensus_encode` writes them.
    fn consensus_decode<D: io::Read + ?Sized>(
        d: &mut D,
    ) -> Result<Self, bitcoin::consensus::encode::Error> {
        let outpoint = Decodable::consensus_decode(d)?;
        let offset = Decodable::consensus_decode(d)?;
        Ok(SatPoint { outpoint, offset })
    }
}
impl Serialize for SatPoint {
    /// Serializes the satpoint as its `Display` string
    /// (`txid:vout:offset`) rather than as a struct with named fields.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_str(self)
    }
}
impl<'de> Deserialize<'de> for SatPoint {
    /// Deserializes from the string form, delegating to the `FromStr` impl
    /// through the `DeserializeFromStr` helper wrapper.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Ok(DeserializeFromStr::deserialize(deserializer)?.0)
    }
}
impl FromStr for SatPoint {
    type Err = anyhow::Error;
    /// Parses a satpoint of the form `txid:vout:offset`.
    ///
    /// Splits on the *last* colon so the remainder (`txid:vout`) can be
    /// handed to `OutPoint`'s own parser; parse failures in either half
    /// propagate via `?`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.rsplit_once(':') {
            Some((outpoint, offset)) => Ok(SatPoint {
                outpoint: outpoint.parse()?,
                offset: offset.parse()?,
            }),
            None => Err(anyhow::anyhow!("invalid satpoint: {s}")),
        }
    }
}
// Unit tests for SatPoint parsing and serde round-tripping.
#[cfg(test)]
mod tests {
    use super::*;
    // Well-formed `txid:vout:offset` parses into its outpoint and offset.
    #[test]
    fn from_str_ok() {
        assert_eq!(
            "1111111111111111111111111111111111111111111111111111111111111111:1:1"
                .parse::<SatPoint>()
                .unwrap(),
            SatPoint {
                outpoint: "1111111111111111111111111111111111111111111111111111111111111111:1"
                    .parse()
                    .unwrap(),
                offset: 1,
            }
        );
    }
    // Missing components or non-numeric offsets must fail to parse.
    #[test]
    fn from_str_err() {
        "abc".parse::<SatPoint>().unwrap_err();
        "abc:xyz".parse::<SatPoint>().unwrap_err();
        "1111111111111111111111111111111111111111111111111111111111111111:1"
            .parse::<SatPoint>()
            .unwrap_err();
        "1111111111111111111111111111111111111111111111111111111111111111:1:foo"
            .parse::<SatPoint>()
            .unwrap_err();
    }
    // Deserializing from a JSON string goes through the FromStr path.
    #[test]
    fn deserialize_ok() {
        assert_eq!(
            serde_json::from_str::<SatPoint>(
                "\"1111111111111111111111111111111111111111111111111111111111111111:1:1\""
            )
            .unwrap(),
            SatPoint {
                outpoint: "1111111111111111111111111111111111111111111111111111111111111111:1"
                    .parse()
                    .unwrap(),
                offset: 1,
            }
        );
    }
}

View File

@@ -18,15 +18,15 @@ use crate::db::ordinals_pg;
use crate::utils::bitcoind::bitcoind_wait_for_chain_tip;
use crate::utils::monitoring::{start_serving_prometheus_metrics, PrometheusMonitoring};
use crate::{try_error, try_info};
use chainhook_postgres::deadpool_postgres::Pool;
use chainhook_postgres::{pg_begin, pg_pool, pg_pool_client};
use chainhook_sdk::observer::{
start_event_observer, BitcoinBlockDataCached, ObserverEvent, ObserverSidecar,
};
use chainhook_sdk::types::BlockIdentifier;
use chainhook_types::BlockIdentifier;
use chainhook_sdk::utils::{BlockHeights, Context};
use crossbeam_channel::select;
use dashmap::DashMap;
use deadpool_postgres::Pool;
use fxhash::FxHasher;
use std::collections::BTreeMap;
@@ -135,7 +135,6 @@ impl Service {
observer_command_rx,
Some(observer_event_tx),
Some(zmq_observer_sidecar),
None,
inner_ctx,
);

View File

@@ -8,7 +8,7 @@ use std::{
path::PathBuf,
};
use chainhook_sdk::types::TransactionIdentifier;
use chainhook_types::TransactionIdentifier;
pub fn read_file_content_at_path(file_path: &PathBuf) -> Result<Vec<u8>, String> {
use std::fs::File;

Some files were not shown because too many files have changed in this diff Show More