feat: add brc20 logic

Gaze
2024-06-07 00:48:31 +07:00
parent f3c6180c17
commit ccdc4c56ff
20 changed files with 1425 additions and 73 deletions

View File

@@ -39,7 +39,7 @@
"ui.completion.usePlaceholders": false,
"ui.diagnostic.analyses": {
// https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md
// "fieldalignment": false,
"fieldalignment": false,
"nilness": true,
"shadow": false,
"unusedparams": true,

View File

@@ -56,6 +56,15 @@ INSERT INTO "brc20_inscription_entry_states" ("id", "block_height", "transfer_co
-- name: CreateInscriptionTransfers :batchexec
INSERT INTO "brc20_inscription_transfers" ("inscription_id", "block_height", "tx_index", "old_satpoint_tx_hash", "old_satpoint_out_idx", "old_satpoint_offset", "new_satpoint_tx_hash", "new_satpoint_out_idx", "new_satpoint_offset", "new_pkscript", "new_output_value", "sent_as_fee") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12);
-- name: CreateDeployEvents :batchexec
INSERT INTO "brc20_deploy_events" ("inscription_id", "inscription_number", "tick", "original_tick", "tx_hash", "block_height", "tx_index", "timestamp", "pkscript", "total_supply", "decimals", "limit_per_mint", "is_self_mint") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13);
-- name: CreateMintEvents :batchexec
INSERT INTO "brc20_mint_events" ("inscription_id", "inscription_number", "tick", "original_tick", "tx_hash", "block_height", "tx_index", "timestamp", "pkscript", "amount", "parent_id") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11);
-- name: CreateTransferEvents :batchexec
INSERT INTO "brc20_transfer_events" ("inscription_id", "inscription_number", "tick", "original_tick", "tx_hash", "block_height", "tx_index", "timestamp", "from_pkscript", "from_satpoint", "to_pkscript", "to_satpoint", "amount") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13);
-- name: DeleteIndexedBlocksSinceHeight :exec
DELETE FROM "brc20_indexed_blocks" WHERE "height" >= $1;

View File

@@ -0,0 +1,16 @@
package brc20
import "github.com/gaze-network/indexer-network/common"
var selfMintActivationHeights = map[common.Network]uint64{
common.NetworkMainnet: 837090,
common.NetworkTestnet: 837090,
}
func isSelfMintActivated(height uint64, network common.Network) bool {
activationHeight, ok := selfMintActivationHeights[network]
if !ok {
return false
}
return height >= activationHeight
}

View File

@@ -0,0 +1,21 @@
package brc20
type Operation string
const (
OperationDeploy Operation = "deploy"
OperationMint Operation = "mint"
OperationTransfer Operation = "transfer"
)
func (o Operation) IsValid() bool {
switch o {
case OperationDeploy, OperationMint, OperationTransfer:
return true
}
return false
}
func (o Operation) String() string {
return string(o)
}

View File

@@ -0,0 +1,211 @@
package brc20
import (
"encoding/json"
"math"
"strconv"
"strings"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/modules/brc20/internal/entity"
"github.com/gaze-network/uint128"
)
type rawPayload struct {
P string `json:"p"` // required
Op string `json:"op"` // required
Tick string `json:"tick"` // required
// for deploy operations
Max string `json:"max"` // required
Lim *string `json:"lim"`
Dec *string `json:"dec"`
SelfMint *string `json:"self_mint"`
// for mint/transfer operations
Amt string `json:"amt"` // required
}
type Payload struct {
Transfer *entity.InscriptionTransfer
P string
Op Operation
Tick string // lower-cased tick
OriginalTick string // original tick before lower-cased
// for deploy operations
Max uint128.Uint128
Lim uint128.Uint128
Dec uint16
SelfMint bool
// for mint/transfer operations
Amt uint128.Uint128
}
var (
ErrInvalidProtocol = errors.New("invalid protocol: must be 'brc20'")
ErrInvalidOperation = errors.New("invalid operation for brc20: must be one of 'deploy', 'mint', or 'transfer'")
ErrInvalidTickLength = errors.New("invalid tick length: must be 4 or 5 bytes")
ErrEmptyTick = errors.New("empty tick")
ErrEmptyMax = errors.New("empty max")
ErrInvalidMax = errors.New("invalid max")
ErrInvalidDec = errors.New("invalid dec")
ErrInvalidSelfMint = errors.New("invalid self_mint")
ErrInvalidAmt = errors.New("invalid amt")
ErrNumberOverflow = errors.New("number overflow: max value is (2^64-1) * 10^18")
)
func ParsePayload(transfer *entity.InscriptionTransfer) (*Payload, error) {
var p rawPayload
err := json.Unmarshal(transfer.Content, &p)
if err != nil {
return nil, errors.Wrap(err, "failed to unmarshal payload as json")
}
if p.P != "brc20" {
return nil, errors.WithStack(ErrInvalidProtocol)
}
if !Operation(p.Op).IsValid() {
return nil, errors.WithStack(ErrInvalidOperation)
}
if p.Tick == "" {
return nil, errors.WithStack(ErrEmptyTick)
}
if len(p.Tick) != 4 && len(p.Tick) != 5 {
return nil, errors.WithStack(ErrInvalidTickLength)
}
parsed := Payload{
Transfer: transfer,
P: p.P,
Op: Operation(p.Op),
Tick: strings.ToLower(p.Tick),
OriginalTick: p.Tick,
}
switch parsed.Op {
case OperationDeploy:
if p.Max == "" {
return nil, errors.WithStack(ErrEmptyMax)
}
rawDec := "18" // dec defaults to 18 when omitted
if p.Dec != nil {
rawDec = *p.Dec
}
dec, err := strconv.ParseUint(rawDec, 10, 16)
if err != nil {
return nil, errors.Wrap(err, "failed to parse dec")
}
if dec > 18 {
return nil, errors.WithStack(ErrInvalidDec)
}
parsed.Dec = uint16(dec)
max, err := parseNumberExtendedTo18Decimal(p.Max, dec)
if err != nil {
return nil, errors.Wrap(err, "failed to parse max")
}
parsed.Max = max
limit := max
if p.Lim != nil {
limit, err = parseNumberExtendedTo18Decimal(*p.Lim, dec)
if err != nil {
return nil, errors.Wrap(err, "failed to parse lim")
}
}
parsed.Lim = limit
// 5-bytes ticks are self-mint only
if len(parsed.OriginalTick) == 5 {
if p.SelfMint == nil || *p.SelfMint != "true" {
return nil, errors.WithStack(ErrInvalidSelfMint)
}
// a self-mint tick with max set to 0 allows unlimited mints
if parsed.Max.IsZero() {
parsed.Max = maxIntegerValue
if parsed.Lim.IsZero() {
parsed.Lim = maxIntegerValue
}
}
}
if parsed.Max.IsZero() {
return nil, errors.WithStack(ErrInvalidMax)
}
case OperationMint, OperationTransfer:
if p.Amt == "" {
return nil, errors.WithStack(ErrInvalidAmt)
}
// NOTE: check tick decimals after parsing payload
amt, err := parseNumberExtendedTo18Decimal(p.Amt, 18)
if err != nil {
return nil, errors.Wrap(err, "failed to parse amt")
}
parsed.Amt = amt
default:
return nil, errors.WithStack(ErrInvalidOperation)
}
return &parsed, nil
}
// max integer for all numeric fields (except dec) is (2^64-1) * 10^18
var maxIntegerValue = uint128.From64(math.MaxUint64).Mul64(1_000_000_000_000_000_000)
func parseNumberExtendedTo18Decimal(s string, dec uint64) (uint128.Uint128, error) {
parts := strings.Split(s, ".")
if len(parts) > 2 {
return uint128.Uint128{}, errors.New("cannot parse decimal number: too many decimal points")
}
wholePart := parts[0]
var decimalPart string
if len(parts) == 2 {
decimalPart = parts[1]
if len(decimalPart) == 0 || len(decimalPart) > int(dec) {
return uint128.Uint128{}, errors.New("invalid decimal part")
}
}
// pad decimal part with zeros until 18 digits
decimalPart += strings.Repeat("0", 18-len(decimalPart))
number, err := uint128.FromString(wholePart + decimalPart)
if err != nil {
if errors.Is(err, uint128.ErrValueOverflow) {
return uint128.Uint128{}, errors.WithStack(ErrNumberOverflow)
}
return uint128.Uint128{}, errors.Wrap(err, "failed to parse number")
}
if number.Cmp(maxIntegerValue) > 0 {
return uint128.Uint128{}, errors.WithStack(ErrNumberOverflow)
}
return number, nil
}
var powerOfTens = []uint64{
1e0,
1e1,
1e2,
1e3,
1e4,
1e5,
1e6,
1e7,
1e8,
1e9,
1e10,
1e11,
1e12,
1e13,
1e14,
1e15,
1e16,
1e17,
1e18,
}
// IsAmountWithinDecimals reports whether amt (in 18-decimal fixed point) uses no more precision than dec decimal places allow.
func IsAmountWithinDecimals(amt uint128.Uint128, dec uint16) bool {
if dec > 18 {
return false
}
_, rem := amt.QuoRem64(powerOfTens[18-int(dec)])
return rem == 0
}
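A minimal in-package test sketch of the two helpers above (hypothetical, not part of this commit), assuming only the uint128.From64 and Cmp helpers already used in this file:

package brc20

import (
	"testing"

	"github.com/gaze-network/uint128"
)

func TestParseNumberExtendedTo18Decimal(t *testing.T) {
	// "1.5" with dec=18 is extended to 1.5 * 10^18
	got, err := parseNumberExtendedTo18Decimal("1.5", 18)
	if err != nil {
		t.Fatal(err)
	}
	want := uint128.From64(1_500_000_000_000_000_000)
	if got.Cmp(want) != 0 {
		t.Fatalf("got %v, want %v", got, want)
	}
	// more fractional digits than dec allows is rejected
	if _, err := parseNumberExtendedTo18Decimal("1.123", 2); err == nil {
		t.Fatal("expected error for excess decimal digits")
	}
	// 1.5 * 10^18 is representable with dec=1 but not with dec=0
	if !IsAmountWithinDecimals(want, 1) || IsAmountWithinDecimals(want, 0) {
		t.Fatal("unexpected IsAmountWithinDecimals result")
	}
}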

View File

@@ -27,21 +27,27 @@ type BRC20ReaderDataGateway interface {
GetIndexedBlockByHeight(ctx context.Context, height int64) (*entity.IndexedBlock, error)
GetProcessorStats(ctx context.Context) (*entity.ProcessorStats, error)
GetInscriptionTransfersInOutPoints(ctx context.Context, outPoints []wire.OutPoint) (map[ordinals.SatPoint][]*entity.InscriptionTransfer, error)
GetInscriptionEntryById(ctx context.Context, id ordinals.InscriptionId) (*ordinals.InscriptionEntry, error)
GetInscriptionEntriesByIds(ctx context.Context, ids []ordinals.InscriptionId) (map[ordinals.InscriptionId]*ordinals.InscriptionEntry, error)
GetTickEntriesByTicks(ctx context.Context, ticks []string) (map[string]*entity.TickEntry, error)
}
type BRC20WriterDataGateway interface {
CreateIndexedBlock(ctx context.Context, block *entity.IndexedBlock) error
CreateProcessorStats(ctx context.Context, stats *entity.ProcessorStats) error
CreateTickEntries(ctx context.Context, blockHeight uint64, entries []*entity.TickEntry) error
CreateTickEntryStates(ctx context.Context, blockHeight uint64, entryStates []*entity.TickEntry) error
CreateInscriptionEntries(ctx context.Context, blockHeight uint64, entries []*ordinals.InscriptionEntry) error
CreateInscriptionEntryStates(ctx context.Context, blockHeight uint64, entryStates []*ordinals.InscriptionEntry) error
CreateInscriptionTransfers(ctx context.Context, transfers []*entity.InscriptionTransfer) error
CreateEventDeploys(ctx context.Context, events []*entity.EventDeploy) error
CreateEventMints(ctx context.Context, events []*entity.EventMint) error
CreateEventTransfers(ctx context.Context, events []*entity.EventTransfer) error
// used to revert data
DeleteIndexedBlocksSinceHeight(ctx context.Context, height uint64) error
DeleteProcessorStatsSinceHeight(ctx context.Context, height uint64) error
DeleteTicksSinceHeight(ctx context.Context, height uint64) error
DeleteTickStatesSinceHeight(ctx context.Context, height uint64) error
DeleteTickEntriesSinceHeight(ctx context.Context, height uint64) error
DeleteTickEntryStatesSinceHeight(ctx context.Context, height uint64) error
DeleteDeployEventsSinceHeight(ctx context.Context, height uint64) error
DeleteMintEventsSinceHeight(ctx context.Context, height uint64) error
DeleteTransferEventsSinceHeight(ctx context.Context, height uint64) error

View File

@@ -0,0 +1,27 @@
package entity
import (
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/gaze-network/indexer-network/modules/brc20/internal/ordinals"
"github.com/gaze-network/uint128"
)
type EventDeploy struct {
Id uint64
InscriptionId ordinals.InscriptionId
InscriptionNumber uint64
Tick string
OriginalTick string
TxHash chainhash.Hash
BlockHeight uint64
TxIndex uint32
Timestamp time.Time
PkScript []byte
TotalSupply uint128.Uint128
Decimals uint16
LimitPerMint uint128.Uint128
IsSelfMint bool
}

View File

@@ -0,0 +1,25 @@
package entity
import (
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/gaze-network/indexer-network/modules/brc20/internal/ordinals"
"github.com/gaze-network/uint128"
)
type EventMint struct {
Id uint64
InscriptionId ordinals.InscriptionId
InscriptionNumber uint64
Tick string
OriginalTick string
TxHash chainhash.Hash
BlockHeight uint64
TxIndex uint32
Timestamp time.Time
PkScript []byte
Amount uint128.Uint128
ParentId *ordinals.InscriptionId
}

View File

@@ -0,0 +1,27 @@
package entity
import (
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/gaze-network/indexer-network/modules/brc20/internal/ordinals"
"github.com/gaze-network/uint128"
)
type EventTransfer struct {
Id uint64
InscriptionId ordinals.InscriptionId
InscriptionNumber uint64
Tick string
OriginalTick string
TxHash chainhash.Hash
BlockHeight uint64
TxIndex uint32
Timestamp time.Time
FromPkScript []byte
FromSatPoint ordinals.SatPoint
ToPkScript []byte
ToSatPoint ordinals.SatPoint
Amount uint128.Uint128
}

View File

@@ -0,0 +1,25 @@
package entity
import (
"time"
"github.com/gaze-network/indexer-network/modules/brc20/internal/ordinals"
"github.com/gaze-network/uint128"
)
type TickEntry struct {
Tick string
OriginalTick string
TotalSupply uint128.Uint128
Decimals uint16
LimitPerMint uint128.Uint128
IsSelfMint bool
DeployInscriptionId ordinals.InscriptionId
CreatedAt time.Time
CreatedAtHeight uint64
MintedAmount uint128.Uint128
BurnedAmount uint128.Uint128
CompletedAt time.Time
CompletedAtHeight uint64
}

View File

@@ -13,6 +13,7 @@ type Inscription struct {
Pointer *uint64
}
// TODO: refactor ordinals.InscriptionEntry to entity.InscriptionEntry
type InscriptionEntry struct {
Id InscriptionId
Number int64

View File

@@ -92,23 +92,39 @@ func (r *Repository) GetInscriptionTransfersInOutPoints(ctx context.Context, out
return results, nil
}
func (r *Repository) GetInscriptionEntryById(ctx context.Context, id ordinals.InscriptionId) (*ordinals.InscriptionEntry, error) {
models, err := r.queries.GetInscriptionEntriesByIds(ctx, []string{id.String()})
func (r *Repository) GetInscriptionEntriesByIds(ctx context.Context, ids []ordinals.InscriptionId) (map[ordinals.InscriptionId]*ordinals.InscriptionEntry, error) {
idStrs := lo.Map(ids, func(id ordinals.InscriptionId, _ int) string { return id.String() })
models, err := r.queries.GetInscriptionEntriesByIds(ctx, idStrs)
if err != nil {
return nil, errors.WithStack(err)
}
if len(models) == 0 {
return nil, errors.WithStack(errs.NotFound)
result := make(map[ordinals.InscriptionId]*ordinals.InscriptionEntry)
for _, model := range models {
inscriptionEntry, err := mapInscriptionEntryModelToType(model)
if err != nil {
return nil, errors.Wrap(err, "failed to parse inscription entry model")
}
result[inscriptionEntry.Id] = &inscriptionEntry
}
if len(models) > 1 {
// sanity check
panic("multiple inscription entries found for the same id")
}
inscriptionEntry, err := mapInscriptionEntryModelToType(models[0])
return result, nil
}
func (r *Repository) GetTickEntriesByTicks(ctx context.Context, ticks []string) (map[string]*entity.TickEntry, error) {
models, err := r.queries.GetTickEntriesByTicks(ctx, ticks)
if err != nil {
return nil, errors.WithStack(err)
}
return &inscriptionEntry, nil
result := make(map[string]*entity.TickEntry)
for _, model := range models {
tickEntry, err := mapTickEntryModelToType(model)
if err != nil {
return nil, errors.Wrap(err, "failed to parse tick entry model")
}
result[tickEntry.Tick] = &tickEntry
}
return result, nil
}
func (r *Repository) CreateIndexedBlock(ctx context.Context, block *entity.IndexedBlock) error {
@@ -127,6 +143,50 @@ func (r *Repository) CreateProcessorStats(ctx context.Context, stats *entity.Pro
return nil
}
func (r *Repository) CreateTickEntries(ctx context.Context, blockHeight uint64, entries []*entity.TickEntry) error {
entryParams := make([]gen.CreateTickEntriesParams, 0)
for _, entry := range entries {
params, _, err := mapTickEntryTypeToParams(*entry, blockHeight)
if err != nil {
return errors.Wrap(err, "cannot map tick entry to create params")
}
entryParams = append(entryParams, params)
}
results := r.queries.CreateTickEntries(ctx, entryParams)
var execErrors []error
results.Exec(func(i int, err error) {
if err != nil {
execErrors = append(execErrors, err)
}
})
if len(execErrors) > 0 {
return errors.Wrap(errors.Join(execErrors...), "error during exec")
}
return nil
}
func (r *Repository) CreateTickEntryStates(ctx context.Context, blockHeight uint64, entryStates []*entity.TickEntry) error {
entryParams := make([]gen.CreateTickEntryStatesParams, 0)
for _, entry := range entryStates {
_, params, err := mapTickEntryTypeToParams(*entry, blockHeight)
if err != nil {
return errors.Wrap(err, "cannot map tick entry to create params")
}
entryParams = append(entryParams, params)
}
results := r.queries.CreateTickEntryStates(ctx, entryParams)
var execErrors []error
results.Exec(func(i int, err error) {
if err != nil {
execErrors = append(execErrors, err)
}
})
if len(execErrors) > 0 {
return errors.Wrap(errors.Join(execErrors...), "error during exec")
}
return nil
}
func (r *Repository) CreateInscriptionEntries(ctx context.Context, blockHeight uint64, entries []*ordinals.InscriptionEntry) error {
inscriptionEntryParams := make([]gen.CreateInscriptionEntriesParams, 0)
for _, entry := range entries {
@@ -188,6 +248,72 @@ func (r *Repository) CreateInscriptionTransfers(ctx context.Context, transfers [
return nil
}
func (r *Repository) CreateEventDeploys(ctx context.Context, events []*entity.EventDeploy) error {
params := make([]gen.CreateDeployEventsParams, 0)
for _, event := range events {
param, err := mapEventDeployTypeToParams(*event)
if err != nil {
return errors.Wrap(err, "cannot map event deploy to create params")
}
params = append(params, param)
}
results := r.queries.CreateDeployEvents(ctx, params)
var execErrors []error
results.Exec(func(i int, err error) {
if err != nil {
execErrors = append(execErrors, err)
}
})
if len(execErrors) > 0 {
return errors.Wrap(errors.Join(execErrors...), "error during exec")
}
return nil
}
func (r *Repository) CreateEventMints(ctx context.Context, events []*entity.EventMint) error {
params := make([]gen.CreateMintEventsParams, 0)
for _, event := range events {
param, err := mapEventMintTypeToParams(*event)
if err != nil {
return errors.Wrap(err, "cannot map event mint to create params")
}
params = append(params, param)
}
results := r.queries.CreateMintEvents(ctx, params)
var execErrors []error
results.Exec(func(i int, err error) {
if err != nil {
execErrors = append(execErrors, err)
}
})
if len(execErrors) > 0 {
return errors.Wrap(errors.Join(execErrors...), "error during exec")
}
return nil
}
func (r *Repository) CreateEventTransfers(ctx context.Context, events []*entity.EventTransfer) error {
params := make([]gen.CreateTransferEventsParams, 0)
for _, event := range events {
param, err := mapEventTransferTypeToParams(*event)
if err != nil {
return errors.Wrap(err, "cannot map event transfer to create params")
}
params = append(params, param)
}
results := r.queries.CreateTransferEvents(ctx, params)
var execErrors []error
results.Exec(func(i int, err error) {
if err != nil {
execErrors = append(execErrors, err)
}
})
if len(execErrors) > 0 {
return errors.Wrap(errors.Join(execErrors...), "error during exec")
}
return nil
}
func (r *Repository) DeleteIndexedBlocksSinceHeight(ctx context.Context, height uint64) error {
if err := r.queries.DeleteIndexedBlocksSinceHeight(ctx, int32(height)); err != nil {
return errors.Wrap(err, "error during exec")
@@ -202,15 +328,15 @@ func (r *Repository) DeleteProcessorStatsSinceHeight(ctx context.Context, height
return nil
}
func (r *Repository) DeleteTicksSinceHeight(ctx context.Context, height uint64) error {
if err := r.queries.DeleteTicksSinceHeight(ctx, int32(height)); err != nil {
func (r *Repository) DeleteTickEntriesSinceHeight(ctx context.Context, height uint64) error {
if err := r.queries.DeleteTickEntriesSinceHeight(ctx, int32(height)); err != nil {
return errors.Wrap(err, "error during exec")
}
return nil
}
func (r *Repository) DeleteTickStatesSinceHeight(ctx context.Context, height uint64) error {
if err := r.queries.DeleteTickStatesSinceHeight(ctx, int32(height)); err != nil {
func (r *Repository) DeleteTickEntryStatesSinceHeight(ctx context.Context, height uint64) error {
if err := r.queries.DeleteTickEntryStatesSinceHeight(ctx, int32(height)); err != nil {
return errors.Wrap(err, "error during exec")
}
return nil

View File

@@ -17,6 +17,77 @@ var (
ErrBatchAlreadyClosed = errors.New("batch already closed")
)
const createDeployEvents = `-- name: CreateDeployEvents :batchexec
INSERT INTO "brc20_deploy_events" ("inscription_id", "inscription_number", "tick", "original_tick", "tx_hash", "block_height", "tx_index", "timestamp", "pkscript", "total_supply", "decimals", "limit_per_mint", "is_self_mint") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
`
type CreateDeployEventsBatchResults struct {
br pgx.BatchResults
tot int
closed bool
}
type CreateDeployEventsParams struct {
InscriptionID string
InscriptionNumber int64
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
Pkscript string
TotalSupply pgtype.Numeric
Decimals int16
LimitPerMint pgtype.Numeric
IsSelfMint bool
}
func (q *Queries) CreateDeployEvents(ctx context.Context, arg []CreateDeployEventsParams) *CreateDeployEventsBatchResults {
batch := &pgx.Batch{}
for _, a := range arg {
vals := []interface{}{
a.InscriptionID,
a.InscriptionNumber,
a.Tick,
a.OriginalTick,
a.TxHash,
a.BlockHeight,
a.TxIndex,
a.Timestamp,
a.Pkscript,
a.TotalSupply,
a.Decimals,
a.LimitPerMint,
a.IsSelfMint,
}
batch.Queue(createDeployEvents, vals...)
}
br := q.db.SendBatch(ctx, batch)
return &CreateDeployEventsBatchResults{br, len(arg), false}
}
func (b *CreateDeployEventsBatchResults) Exec(f func(int, error)) {
defer b.br.Close()
for t := 0; t < b.tot; t++ {
if b.closed {
if f != nil {
f(t, ErrBatchAlreadyClosed)
}
continue
}
_, err := b.br.Exec()
if f != nil {
f(t, err)
}
}
}
func (b *CreateDeployEventsBatchResults) Close() error {
b.closed = true
return b.br.Close()
}
const createInscriptionEntries = `-- name: CreateInscriptionEntries :batchexec
INSERT INTO "brc20_inscription_entries" ("id", "number", "sequence_number", "delegate", "metadata", "metaprotocol", "parents", "pointer", "content", "content_encoding", "content_type", "cursed", "cursed_for_brc20", "created_at", "created_at_height") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)
`
@@ -211,3 +282,261 @@ func (b *CreateInscriptionTransfersBatchResults) Close() error {
b.closed = true
return b.br.Close()
}
const createMintEvents = `-- name: CreateMintEvents :batchexec
INSERT INTO "brc20_mint_events" ("inscription_id", "inscription_number", "tick", "original_tick", "tx_hash", "block_height", "tx_index", "timestamp", "pkscript", "amount", "parent_id") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
`
type CreateMintEventsBatchResults struct {
br pgx.BatchResults
tot int
closed bool
}
type CreateMintEventsParams struct {
InscriptionID string
InscriptionNumber int64
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
Pkscript string
Amount pgtype.Numeric
ParentID pgtype.Text
}
func (q *Queries) CreateMintEvents(ctx context.Context, arg []CreateMintEventsParams) *CreateMintEventsBatchResults {
batch := &pgx.Batch{}
for _, a := range arg {
vals := []interface{}{
a.InscriptionID,
a.InscriptionNumber,
a.Tick,
a.OriginalTick,
a.TxHash,
a.BlockHeight,
a.TxIndex,
a.Timestamp,
a.Pkscript,
a.Amount,
a.ParentID,
}
batch.Queue(createMintEvents, vals...)
}
br := q.db.SendBatch(ctx, batch)
return &CreateMintEventsBatchResults{br, len(arg), false}
}
func (b *CreateMintEventsBatchResults) Exec(f func(int, error)) {
defer b.br.Close()
for t := 0; t < b.tot; t++ {
if b.closed {
if f != nil {
f(t, ErrBatchAlreadyClosed)
}
continue
}
_, err := b.br.Exec()
if f != nil {
f(t, err)
}
}
}
func (b *CreateMintEventsBatchResults) Close() error {
b.closed = true
return b.br.Close()
}
const createTickEntries = `-- name: CreateTickEntries :batchexec
INSERT INTO "brc20_tick_entries" ("tick", "original_tick", "total_supply", "decimals", "limit_per_mint", "is_self_mint", "deploy_inscription_id", "created_at", "created_at_height") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
`
type CreateTickEntriesBatchResults struct {
br pgx.BatchResults
tot int
closed bool
}
type CreateTickEntriesParams struct {
Tick string
OriginalTick string
TotalSupply pgtype.Numeric
Decimals int16
LimitPerMint pgtype.Numeric
IsSelfMint bool
DeployInscriptionID string
CreatedAt pgtype.Timestamp
CreatedAtHeight int32
}
func (q *Queries) CreateTickEntries(ctx context.Context, arg []CreateTickEntriesParams) *CreateTickEntriesBatchResults {
batch := &pgx.Batch{}
for _, a := range arg {
vals := []interface{}{
a.Tick,
a.OriginalTick,
a.TotalSupply,
a.Decimals,
a.LimitPerMint,
a.IsSelfMint,
a.DeployInscriptionID,
a.CreatedAt,
a.CreatedAtHeight,
}
batch.Queue(createTickEntries, vals...)
}
br := q.db.SendBatch(ctx, batch)
return &CreateTickEntriesBatchResults{br, len(arg), false}
}
func (b *CreateTickEntriesBatchResults) Exec(f func(int, error)) {
defer b.br.Close()
for t := 0; t < b.tot; t++ {
if b.closed {
if f != nil {
f(t, ErrBatchAlreadyClosed)
}
continue
}
_, err := b.br.Exec()
if f != nil {
f(t, err)
}
}
}
func (b *CreateTickEntriesBatchResults) Close() error {
b.closed = true
return b.br.Close()
}
const createTickEntryStates = `-- name: CreateTickEntryStates :batchexec
INSERT INTO "brc20_tick_entry_states" ("tick", "block_height", "minted_amount", "burned_amount", "completed_at", "completed_at_height") VALUES ($1, $2, $3, $4, $5, $6)
`
type CreateTickEntryStatesBatchResults struct {
br pgx.BatchResults
tot int
closed bool
}
type CreateTickEntryStatesParams struct {
Tick string
BlockHeight int32
MintedAmount pgtype.Numeric
BurnedAmount pgtype.Numeric
CompletedAt pgtype.Timestamp
CompletedAtHeight pgtype.Int4
}
func (q *Queries) CreateTickEntryStates(ctx context.Context, arg []CreateTickEntryStatesParams) *CreateTickEntryStatesBatchResults {
batch := &pgx.Batch{}
for _, a := range arg {
vals := []interface{}{
a.Tick,
a.BlockHeight,
a.MintedAmount,
a.BurnedAmount,
a.CompletedAt,
a.CompletedAtHeight,
}
batch.Queue(createTickEntryStates, vals...)
}
br := q.db.SendBatch(ctx, batch)
return &CreateTickEntryStatesBatchResults{br, len(arg), false}
}
func (b *CreateTickEntryStatesBatchResults) Exec(f func(int, error)) {
defer b.br.Close()
for t := 0; t < b.tot; t++ {
if b.closed {
if f != nil {
f(t, ErrBatchAlreadyClosed)
}
continue
}
_, err := b.br.Exec()
if f != nil {
f(t, err)
}
}
}
func (b *CreateTickEntryStatesBatchResults) Close() error {
b.closed = true
return b.br.Close()
}
const createTransferEvents = `-- name: CreateTransferEvents :batchexec
INSERT INTO "brc20_transfer_events" ("inscription_id", "inscription_number", "tick", "original_tick", "tx_hash", "block_height", "tx_index", "timestamp", "from_pkscript", "from_satpoint", "to_pkscript", "to_satpoint", "amount") VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
`
type CreateTransferEventsBatchResults struct {
br pgx.BatchResults
tot int
closed bool
}
type CreateTransferEventsParams struct {
InscriptionID string
InscriptionNumber int64
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
FromPkscript pgtype.Text
FromSatpoint pgtype.Text
ToPkscript string
ToSatpoint string
Amount pgtype.Numeric
}
func (q *Queries) CreateTransferEvents(ctx context.Context, arg []CreateTransferEventsParams) *CreateTransferEventsBatchResults {
batch := &pgx.Batch{}
for _, a := range arg {
vals := []interface{}{
a.InscriptionID,
a.InscriptionNumber,
a.Tick,
a.OriginalTick,
a.TxHash,
a.BlockHeight,
a.TxIndex,
a.Timestamp,
a.FromPkscript,
a.FromSatpoint,
a.ToPkscript,
a.ToSatpoint,
a.Amount,
}
batch.Queue(createTransferEvents, vals...)
}
br := q.db.SendBatch(ctx, batch)
return &CreateTransferEventsBatchResults{br, len(arg), false}
}
func (b *CreateTransferEventsBatchResults) Exec(f func(int, error)) {
defer b.br.Close()
for t := 0; t < b.tot; t++ {
if b.closed {
if f != nil {
f(t, ErrBatchAlreadyClosed)
}
continue
}
_, err := b.br.Exec()
if f != nil {
f(t, err)
}
}
}
func (b *CreateTransferEventsBatchResults) Close() error {
b.closed = true
return b.br.Close()
}

View File

@@ -125,21 +125,21 @@ func (q *Queries) DeleteProcessorStatsSinceHeight(ctx context.Context, blockHeig
return err
}
const deleteTickStatesSinceHeight = `-- name: DeleteTickStatesSinceHeight :exec
DELETE FROM "brc20_tick_states" WHERE "block_height" >= $1
const deleteTickEntriesSinceHeight = `-- name: DeleteTickEntriesSinceHeight :exec
DELETE FROM "brc20_tick_entries" WHERE "created_at_height" >= $1
`
func (q *Queries) DeleteTickStatesSinceHeight(ctx context.Context, blockHeight int32) error {
_, err := q.db.Exec(ctx, deleteTickStatesSinceHeight, blockHeight)
func (q *Queries) DeleteTickEntriesSinceHeight(ctx context.Context, createdAtHeight int32) error {
_, err := q.db.Exec(ctx, deleteTickEntriesSinceHeight, createdAtHeight)
return err
}
const deleteTicksSinceHeight = `-- name: DeleteTicksSinceHeight :exec
DELETE FROM "brc20_ticks" WHERE "created_at_height" >= $1
const deleteTickEntryStatesSinceHeight = `-- name: DeleteTickEntryStatesSinceHeight :exec
DELETE FROM "brc20_tick_entry_states" WHERE "block_height" >= $1
`
func (q *Queries) DeleteTicksSinceHeight(ctx context.Context, createdAtHeight int32) error {
_, err := q.db.Exec(ctx, deleteTicksSinceHeight, createdAtHeight)
func (q *Queries) DeleteTickEntryStatesSinceHeight(ctx context.Context, blockHeight int32) error {
_, err := q.db.Exec(ctx, deleteTickEntryStatesSinceHeight, blockHeight)
return err
}
@@ -334,3 +334,67 @@ func (q *Queries) GetLatestProcessorStats(ctx context.Context) (Brc20ProcessorSt
)
return i, err
}
const getTickEntriesByTicks = `-- name: GetTickEntriesByTicks :many
WITH "states" AS (
-- select latest state
SELECT DISTINCT ON ("tick") tick, block_height, minted_amount, burned_amount, completed_at, completed_at_height FROM "brc20_tick_entry_states" WHERE "tick" = ANY($1::text[]) ORDER BY "tick", "block_height" DESC
)
SELECT brc20_tick_entries.tick, original_tick, total_supply, decimals, limit_per_mint, is_self_mint, deploy_inscription_id, created_at, created_at_height, states.tick, block_height, minted_amount, burned_amount, completed_at, completed_at_height FROM "brc20_tick_entries"
LEFT JOIN "states" ON "brc20_tick_entries"."tick" = "states"."tick"
WHERE "brc20_tick_entries"."tick" = ANY($1::text[])
`
type GetTickEntriesByTicksRow struct {
Tick string
OriginalTick string
TotalSupply pgtype.Numeric
Decimals int16
LimitPerMint pgtype.Numeric
IsSelfMint bool
DeployInscriptionID string
CreatedAt pgtype.Timestamp
CreatedAtHeight int32
Tick_2 pgtype.Text
BlockHeight pgtype.Int4
MintedAmount pgtype.Numeric
BurnedAmount pgtype.Numeric
CompletedAt pgtype.Timestamp
CompletedAtHeight pgtype.Int4
}
func (q *Queries) GetTickEntriesByTicks(ctx context.Context, ticks []string) ([]GetTickEntriesByTicksRow, error) {
rows, err := q.db.Query(ctx, getTickEntriesByTicks, ticks)
if err != nil {
return nil, err
}
defer rows.Close()
var items []GetTickEntriesByTicksRow
for rows.Next() {
var i GetTickEntriesByTicksRow
if err := rows.Scan(
&i.Tick,
&i.OriginalTick,
&i.TotalSupply,
&i.Decimals,
&i.LimitPerMint,
&i.IsSelfMint,
&i.DeployInscriptionID,
&i.CreatedAt,
&i.CreatedAtHeight,
&i.Tick_2,
&i.BlockHeight,
&i.MintedAmount,
&i.BurnedAmount,
&i.CompletedAt,
&i.CompletedAtHeight,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}

View File

@@ -17,19 +17,20 @@ type Brc20Balance struct {
}
type Brc20DeployEvent struct {
Id int64
InscriptionID string
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
Pkscript string
TotalSupply pgtype.Numeric
Decimals int16
LimitPerMint pgtype.Numeric
IsSelfMint bool
Id int64
InscriptionID string
InscriptionNumber int64
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
Pkscript string
TotalSupply pgtype.Numeric
Decimals int16
LimitPerMint pgtype.Numeric
IsSelfMint bool
}
type Brc20IndexedBlock struct {
@@ -88,17 +89,18 @@ type Brc20InscriptionTransfer struct {
}
type Brc20MintEvent struct {
Id int64
InscriptionID string
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
Pkscript string
Amount pgtype.Numeric
ParentID pgtype.Text
Id int64
InscriptionID string
InscriptionNumber int64
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
Pkscript string
Amount pgtype.Numeric
ParentID pgtype.Text
}
type Brc20ProcessorStat struct {
@@ -108,7 +110,7 @@ type Brc20ProcessorStat struct {
LostSats int64
}
type Brc20Tick struct {
type Brc20TickEntry struct {
Tick string
OriginalTick string
TotalSupply pgtype.Numeric
@@ -120,7 +122,7 @@ type Brc20Tick struct {
CreatedAtHeight int32
}
type Brc20TickState struct {
type Brc20TickEntryState struct {
Tick string
BlockHeight int32
MintedAmount pgtype.Numeric
@@ -130,15 +132,18 @@ type Brc20TickState struct {
}
type Brc20TransferEvent struct {
Id int64
InscriptionID string
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
FromPkscript pgtype.Text
ToPkscript string
Amount pgtype.Numeric
Id int64
InscriptionID string
InscriptionNumber int64
Tick string
OriginalTick string
TxHash string
BlockHeight int32
TxIndex int32
Timestamp pgtype.Timestamp
FromPkscript pgtype.Text
FromSatpoint pgtype.Text
ToPkscript string
ToSatpoint string
Amount pgtype.Numeric
}

View File

@@ -11,10 +11,39 @@ import (
"github.com/gaze-network/indexer-network/modules/brc20/internal/entity"
"github.com/gaze-network/indexer-network/modules/brc20/internal/ordinals"
"github.com/gaze-network/indexer-network/modules/brc20/internal/repository/postgres/gen"
"github.com/gaze-network/uint128"
"github.com/jackc/pgx/v5/pgtype"
"github.com/samber/lo"
)
func uint128FromNumeric(src pgtype.Numeric) (*uint128.Uint128, error) {
if !src.Valid {
return nil, nil
}
bytes, err := src.MarshalJSON()
if err != nil {
return nil, errors.WithStack(err)
}
result, err := uint128.FromString(string(bytes))
if err != nil {
return nil, errors.WithStack(err)
}
return &result, nil
}
func numericFromUint128(src *uint128.Uint128) (pgtype.Numeric, error) {
if src == nil {
return pgtype.Numeric{}, nil
}
bytes := []byte(src.String())
var result pgtype.Numeric
err := result.UnmarshalJSON(bytes)
if err != nil {
return pgtype.Numeric{}, errors.WithStack(err)
}
return result, nil
}
func mapIndexerStatesModelToType(src gen.Brc20IndexerState) entity.IndexerState {
var createdAt time.Time
if src.CreatedAt.Valid {
@@ -86,6 +115,85 @@ func mapProcessorStatsTypeToParams(src entity.ProcessorStats) gen.CreateProcesso
}
}
func mapTickEntryModelToType(src gen.GetTickEntriesByTicksRow) (entity.TickEntry, error) {
totalSupply, err := uint128FromNumeric(src.TotalSupply)
if err != nil {
return entity.TickEntry{}, errors.Wrap(err, "cannot parse totalSupply")
}
limitPerMint, err := uint128FromNumeric(src.LimitPerMint)
if err != nil {
return entity.TickEntry{}, errors.Wrap(err, "cannot parse limitPerMint")
}
deployInscriptionId, err := ordinals.NewInscriptionIdFromString(src.DeployInscriptionID)
if err != nil {
return entity.TickEntry{}, errors.Wrap(err, "invalid deployInscriptionId")
}
mintedAmount, err := uint128FromNumeric(src.MintedAmount)
if err != nil {
return entity.TickEntry{}, errors.Wrap(err, "cannot parse mintedAmount")
}
burnedAmount, err := uint128FromNumeric(src.BurnedAmount)
if err != nil {
return entity.TickEntry{}, errors.Wrap(err, "cannot parse burnedAmount")
}
var completedAt time.Time
if src.CompletedAt.Valid {
completedAt = src.CompletedAt.Time
}
return entity.TickEntry{
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TotalSupply: lo.FromPtr(totalSupply),
Decimals: uint16(src.Decimals),
LimitPerMint: lo.FromPtr(limitPerMint),
IsSelfMint: src.IsSelfMint,
DeployInscriptionId: deployInscriptionId,
CreatedAt: src.CreatedAt.Time,
CreatedAtHeight: uint64(src.CreatedAtHeight),
MintedAmount: lo.FromPtr(mintedAmount),
BurnedAmount: lo.FromPtr(burnedAmount),
CompletedAt: completedAt,
CompletedAtHeight: lo.Ternary(src.CompletedAtHeight.Valid, uint64(src.CompletedAtHeight.Int32), 0),
}, nil
}
func mapTickEntryTypeToParams(src entity.TickEntry, blockHeight uint64) (gen.CreateTickEntriesParams, gen.CreateTickEntryStatesParams, error) {
totalSupply, err := numericFromUint128(&src.TotalSupply)
if err != nil {
return gen.CreateTickEntriesParams{}, gen.CreateTickEntryStatesParams{}, errors.Wrap(err, "cannot convert totalSupply")
}
limitPerMint, err := numericFromUint128(&src.LimitPerMint)
if err != nil {
return gen.CreateTickEntriesParams{}, gen.CreateTickEntryStatesParams{}, errors.Wrap(err, "cannot convert limitPerMint")
}
mintedAmount, err := numericFromUint128(&src.MintedAmount)
if err != nil {
return gen.CreateTickEntriesParams{}, gen.CreateTickEntryStatesParams{}, errors.Wrap(err, "cannot convert mintedAmount")
}
burnedAmount, err := numericFromUint128(&src.BurnedAmount)
if err != nil {
return gen.CreateTickEntriesParams{}, gen.CreateTickEntryStatesParams{}, errors.Wrap(err, "cannot convert burnedAmount")
}
return gen.CreateTickEntriesParams{
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TotalSupply: totalSupply,
Decimals: int16(src.Decimals),
LimitPerMint: limitPerMint,
IsSelfMint: src.IsSelfMint,
DeployInscriptionID: src.DeployInscriptionId.String(),
CreatedAt: pgtype.Timestamp{Time: src.CreatedAt, Valid: true},
CreatedAtHeight: int32(src.CreatedAtHeight),
}, gen.CreateTickEntryStatesParams{
Tick: src.Tick,
BlockHeight: int32(blockHeight),
CompletedAt: pgtype.Timestamp{Time: src.CompletedAt, Valid: !src.CompletedAt.IsZero()},
CompletedAtHeight: pgtype.Int4{Int32: int32(src.CompletedAtHeight), Valid: src.CompletedAtHeight != 0},
MintedAmount: mintedAmount,
BurnedAmount: burnedAmount,
}, nil
}
func mapInscriptionEntryModelToType(src gen.GetInscriptionEntriesByIdsRow) (ordinals.InscriptionEntry, error) {
inscriptionId, err := ordinals.NewInscriptionIdFromString(src.Id)
if err != nil {
@@ -250,3 +358,213 @@ func mapInscriptionTransferTypeToParams(src entity.InscriptionTransfer) gen.Crea
SentAsFee: src.SentAsFee,
}
}
func mapEventDeployModelToType(src gen.Brc20DeployEvent) (entity.EventDeploy, error) {
inscriptionId, err := ordinals.NewInscriptionIdFromString(src.InscriptionID)
if err != nil {
return entity.EventDeploy{}, errors.Wrap(err, "invalid inscription id")
}
txHash, err := chainhash.NewHashFromStr(src.TxHash)
if err != nil {
return entity.EventDeploy{}, errors.Wrap(err, "invalid tx hash")
}
pkScript, err := hex.DecodeString(src.Pkscript)
if err != nil {
return entity.EventDeploy{}, errors.Wrap(err, "failed to parse pkscript")
}
totalSupply, err := uint128FromNumeric(src.TotalSupply)
if err != nil {
return entity.EventDeploy{}, errors.Wrap(err, "cannot parse totalSupply")
}
limitPerMint, err := uint128FromNumeric(src.LimitPerMint)
if err != nil {
return entity.EventDeploy{}, errors.Wrap(err, "cannot parse limitPerMint")
}
return entity.EventDeploy{
Id: uint64(src.Id),
InscriptionId: inscriptionId,
InscriptionNumber: uint64(src.InscriptionNumber),
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TxHash: *txHash,
BlockHeight: uint64(src.BlockHeight),
TxIndex: uint32(src.TxIndex),
Timestamp: src.Timestamp.Time,
PkScript: pkScript,
TotalSupply: lo.FromPtr(totalSupply),
Decimals: uint16(src.Decimals),
LimitPerMint: lo.FromPtr(limitPerMint),
IsSelfMint: src.IsSelfMint,
}, nil
}
func mapEventDeployTypeToParams(src entity.EventDeploy) (gen.CreateDeployEventsParams, error) {
var timestamp pgtype.Timestamp
if !src.Timestamp.IsZero() {
timestamp = pgtype.Timestamp{Time: src.Timestamp, Valid: true}
}
totalSupply, err := numericFromUint128(&src.TotalSupply)
if err != nil {
return gen.CreateDeployEventsParams{}, errors.Wrap(err, "cannot convert totalSupply")
}
limitPerMint, err := numericFromUint128(&src.LimitPerMint)
if err != nil {
return gen.CreateDeployEventsParams{}, errors.Wrap(err, "cannot convert limitPerMint")
}
return gen.CreateDeployEventsParams{
InscriptionID: src.InscriptionId.String(),
InscriptionNumber: int64(src.InscriptionNumber),
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TxHash: src.TxHash.String(),
BlockHeight: int32(src.BlockHeight),
TxIndex: int32(src.TxIndex),
Timestamp: timestamp,
Pkscript: hex.EncodeToString(src.PkScript),
TotalSupply: totalSupply,
Decimals: int16(src.Decimals),
LimitPerMint: limitPerMint,
IsSelfMint: src.IsSelfMint,
}, nil
}
func mapEventMintModelToType(src gen.Brc20MintEvent) (entity.EventMint, error) {
inscriptionId, err := ordinals.NewInscriptionIdFromString(src.InscriptionID)
if err != nil {
return entity.EventMint{}, errors.Wrap(err, "invalid inscription id")
}
txHash, err := chainhash.NewHashFromStr(src.TxHash)
if err != nil {
return entity.EventMint{}, errors.Wrap(err, "invalid tx hash")
}
pkScript, err := hex.DecodeString(src.Pkscript)
if err != nil {
return entity.EventMint{}, errors.Wrap(err, "failed to parse pkscript")
}
amount, err := uint128FromNumeric(src.Amount)
if err != nil {
return entity.EventMint{}, errors.Wrap(err, "cannot parse amount")
}
return entity.EventMint{
Id: uint64(src.Id),
InscriptionId: inscriptionId,
InscriptionNumber: uint64(src.InscriptionNumber),
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TxHash: *txHash,
BlockHeight: uint64(src.BlockHeight),
TxIndex: uint32(src.TxIndex),
Timestamp: src.Timestamp.Time,
PkScript: pkScript,
Amount: lo.FromPtr(amount),
}, nil
}
func mapEventMintTypeToParams(src entity.EventMint) (gen.CreateMintEventsParams, error) {
var timestamp pgtype.Timestamp
if !src.Timestamp.IsZero() {
timestamp = pgtype.Timestamp{Time: src.Timestamp, Valid: true}
}
amount, err := numericFromUint128(&src.Amount)
if err != nil {
return gen.CreateMintEventsParams{}, errors.Wrap(err, "cannot convert amount")
}
return gen.CreateMintEventsParams{
InscriptionID: src.InscriptionId.String(),
InscriptionNumber: int64(src.InscriptionNumber),
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TxHash: src.TxHash.String(),
BlockHeight: int32(src.BlockHeight),
TxIndex: int32(src.TxIndex),
Timestamp: timestamp,
Pkscript: hex.EncodeToString(src.PkScript),
Amount: amount,
}, nil
}
func mapEventTransferModelToType(src gen.Brc20TransferEvent) (entity.EventTransfer, error) {
inscriptionId, err := ordinals.NewInscriptionIdFromString(src.InscriptionID)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse inscription id")
}
txHash, err := chainhash.NewHashFromStr(src.TxHash)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse hash")
}
var fromPkScript []byte
if src.FromPkscript.Valid {
fromPkScript, err = hex.DecodeString(src.FromPkscript.String)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse fromPkScript")
}
}
var fromSatPoint ordinals.SatPoint
if src.FromSatpoint.Valid {
fromSatPoint, err = ordinals.NewSatPointFromString(src.FromSatpoint.String)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse fromSatPoint")
}
}
toPkScript, err := hex.DecodeString(src.ToPkscript)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse toPkScript")
}
toSatPoint, err := ordinals.NewSatPointFromString(src.ToSatpoint)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse toSatPoint")
}
amount, err := uint128FromNumeric(src.Amount)
if err != nil {
return entity.EventTransfer{}, errors.Wrap(err, "cannot parse amount")
}
return entity.EventTransfer{
Id: uint64(src.Id),
InscriptionId: inscriptionId,
InscriptionNumber: uint64(src.InscriptionNumber),
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TxHash: *txHash,
BlockHeight: uint64(src.BlockHeight),
TxIndex: uint32(src.TxIndex),
Timestamp: src.Timestamp.Time,
FromPkScript: fromPkScript,
FromSatPoint: fromSatPoint,
ToPkScript: toPkScript,
ToSatPoint: toSatPoint,
Amount: lo.FromPtr(amount),
}, nil
}
func mapEventTransferTypeToParams(src entity.EventTransfer) (gen.CreateTransferEventsParams, error) {
var timestamp pgtype.Timestamp
if !src.Timestamp.IsZero() {
timestamp = pgtype.Timestamp{Time: src.Timestamp, Valid: true}
}
amount, err := numericFromUint128(&src.Amount)
if err != nil {
return gen.CreateTransferEventsParams{}, errors.Wrap(err, "cannot convert amount")
}
var fromPkScript, fromSatPoint pgtype.Text
if src.FromPkScript != nil {
fromPkScript = pgtype.Text{String: hex.EncodeToString(src.FromPkScript), Valid: true}
}
if src.FromSatPoint != (ordinals.SatPoint{}) {
fromSatPoint = pgtype.Text{String: src.FromSatPoint.String(), Valid: true}
}
return gen.CreateTransferEventsParams{
InscriptionID: src.InscriptionId.String(),
InscriptionNumber: int64(src.InscriptionNumber),
Tick: src.Tick,
OriginalTick: src.OriginalTick,
TxHash: src.TxHash.String(),
BlockHeight: int32(src.BlockHeight),
TxIndex: int32(src.TxIndex),
Timestamp: timestamp,
FromPkscript: fromPkScript,
FromSatpoint: fromSatPoint,
ToPkscript: hex.EncodeToString(src.ToPkScript),
ToSatpoint: src.ToSatPoint.String(),
Amount: amount,
}, nil
}
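A small round-trip sketch for the uint128FromNumeric / numericFromUint128 helpers introduced at the top of this file (hypothetical test, not part of this commit; the package name is assumed to match this file):

package postgres

import (
	"testing"

	"github.com/gaze-network/uint128"
)

func TestNumericUint128RoundTrip(t *testing.T) {
	// uint128 -> pgtype.Numeric -> uint128 should preserve the value exactly
	orig := uint128.From64(123_456_789)
	n, err := numericFromUint128(&orig)
	if err != nil {
		t.Fatal(err)
	}
	back, err := uint128FromNumeric(n)
	if err != nil {
		t.Fatal(err)
	}
	if back == nil || back.Cmp(orig) != 0 {
		t.Fatal("round-trip mismatch")
	}
}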

View File

@@ -172,10 +172,10 @@ func (p *Processor) RevertData(ctx context.Context, from int64) error {
if err := brc20DgTx.DeleteProcessorStatsSinceHeight(ctx, uint64(from)); err != nil {
return errors.Wrap(err, "failed to delete processor stats")
}
if err := brc20DgTx.DeleteTicksSinceHeight(ctx, uint64(from)); err != nil {
if err := brc20DgTx.DeleteTickEntriesSinceHeight(ctx, uint64(from)); err != nil {
return errors.Wrap(err, "failed to delete ticks")
}
if err := brc20DgTx.DeleteTickStatesSinceHeight(ctx, uint64(from)); err != nil {
if err := brc20DgTx.DeleteTickEntryStatesSinceHeight(ctx, uint64(from)); err != nil {
return errors.Wrap(err, "failed to delete tick states")
}
if err := brc20DgTx.DeleteDeployEventsSinceHeight(ctx, uint64(from)); err != nil {

View File

@@ -2,10 +2,120 @@ package brc20
import (
"context"
"time"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/core/types"
"github.com/gaze-network/indexer-network/modules/brc20/internal/brc20"
"github.com/gaze-network/indexer-network/modules/brc20/internal/entity"
"github.com/gaze-network/indexer-network/pkg/logger"
"github.com/gaze-network/indexer-network/pkg/logger/slogx"
"github.com/gaze-network/uint128"
"github.com/samber/lo"
)
func (p *Processor) processBRC20States(ctx context.Context, transfers []*entity.InscriptionTransfer) error {
panic("not implemented")
func (p *Processor) processBRC20States(ctx context.Context, transfers []*entity.InscriptionTransfer, blockHeader types.BlockHeader) error {
payloads := make([]*brc20.Payload, 0)
ticks := make(map[string]struct{})
for _, transfer := range transfers {
payload, err := brc20.ParsePayload(transfer)
if err != nil {
// not a valid BRC-20 operation; skip it instead of failing the whole block
logger.DebugContext(ctx, "failed to parse BRC-20 payload, skipping...",
slogx.Stringer("inscriptionId", transfer.InscriptionId),
slogx.String("error", err.Error()),
)
continue
}
payloads = append(payloads, payload)
ticks[payload.Tick] = struct{}{}
}
entries, err := p.getTickEntriesByTicks(ctx, lo.Keys(ticks))
if err != nil {
return errors.Wrap(err, "failed to get tick entries by ticks")
}
newTickEntries := make(map[string]*entity.TickEntry)
newTickEntryStates := make(map[string]*entity.TickEntry)
// newDeployEvents := make([]*entity.EventDeploy, 0)
// newMintEvents := make([]*entity.EventMint, 0)
// newTransferEvents := make([]*entity.EventTransfer, 0)
for _, payload := range payloads {
entry := entries[payload.Tick]
switch payload.Op {
case brc20.OperationDeploy:
if entry != nil {
logger.DebugContext(ctx, "found deploy inscription but tick already exists, skipping...",
slogx.String("tick", payload.Tick),
slogx.Stringer("entryInscriptionId", entry.DeployInscriptionId),
slogx.Stringer("currentInscriptionId", payload.Transfer.InscriptionId),
)
continue
}
tickEntry := &entity.TickEntry{
Tick: payload.Tick,
OriginalTick: payload.OriginalTick,
TotalSupply: payload.Max,
Decimals: payload.Dec,
LimitPerMint: payload.Lim,
IsSelfMint: payload.SelfMint,
DeployInscriptionId: payload.Transfer.InscriptionId,
CreatedAt: blockHeader.Timestamp,
CreatedAtHeight: uint64(blockHeader.Height),
MintedAmount: uint128.Zero,
BurnedAmount: uint128.Zero,
CompletedAt: time.Time{},
CompletedAtHeight: 0,
}
newTickEntries[payload.Tick] = tickEntry
newTickEntryStates[payload.Tick] = tickEntry
// update entries for other operations in the same block
entries[payload.Tick] = tickEntry
// TODO: handle deploy action
case brc20.OperationMint:
if entry == nil {
logger.DebugContext(ctx, "found mint inscription but tick does not exist, skipping...",
slogx.String("tick", payload.Tick),
slogx.Stringer("inscriptionId", payload.Transfer.InscriptionId),
)
continue
}
if !brc20.IsAmountWithinDecimals(payload.Amt, entry.Decimals) {
logger.DebugContext(ctx, "found mint inscription but amount has invalid decimals, skipping...",
slogx.String("tick", payload.Tick),
slogx.Stringer("inscriptionId", payload.Transfer.InscriptionId),
slogx.Stringer("amount", payload.Amt),
slogx.Uint16("decimals", entry.Decimals),
)
continue
}
// TODO: handle mint action
case brc20.OperationTransfer:
if entry == nil {
logger.DebugContext(ctx, "found transfer inscription but tick does not exist, skipping...",
slogx.String("tick", payload.Tick),
slogx.Stringer("inscriptionId", payload.Transfer.InscriptionId),
)
continue
}
if !brc20.IsAmountWithinDecimals(payload.Amt, entry.Decimals) {
logger.DebugContext(ctx, "found transfer inscription but amount has invalid decimals, skipping...",
slogx.String("tick", payload.Tick),
slogx.Stringer("inscriptionId", payload.Transfer.InscriptionId),
slogx.Stringer("amount", payload.Amt),
slogx.Uint16("decimals", entry.Decimals),
)
continue
}
// TODO: handle transfer action
}
}
return nil
}
func (p *Processor) getTickEntriesByTicks(ctx context.Context, ticks []string) (map[string]*entity.TickEntry, error) {
// TODO: get from buffer if exists
result, err := p.brc20Dg.GetTickEntriesByTicks(ctx, ticks)
if err != nil {
return nil, errors.Wrap(err, "failed to get tick entries by ticks")
}
return result, nil
}

View File

@@ -369,6 +369,10 @@ type brc20Inscription struct {
}
func isBRC20Inscription(inscription ordinals.Inscription) bool {
if inscription.ContentType != "application/json" && inscription.ContentType != "text/plain" {
return false
}
// attempt to parse content as json
if inscription.Content == nil {
return false
@@ -469,18 +473,43 @@ func (p *Processor) getInscriptionTransfersInOutPoints(ctx context.Context, outP
return result, nil
}
func (p *Processor) getInscriptionEntryById(ctx context.Context, inscriptionId ordinals.InscriptionId) (*ordinals.InscriptionEntry, error) {
if inscriptionEntry, ok := p.newInscriptionEntryStates[inscriptionId]; ok {
return inscriptionEntry, nil
}
inscription, err := p.brc20Dg.GetInscriptionEntryById(ctx, inscriptionId)
func (p *Processor) getInscriptionEntryById(ctx context.Context, id ordinals.InscriptionId) (*ordinals.InscriptionEntry, error) {
inscriptions, err := p.brc20Dg.GetInscriptionEntriesByIds(ctx, []ordinals.InscriptionId{id})
if err != nil {
return nil, errors.Wrap(err, "failed to get inscription entries by ids")
}
inscription, ok := inscriptions[id]
if !ok {
return nil, errors.Wrap(errs.NotFound, "inscription not found")
}
return inscription, nil
}
func (p *Processor) getInscriptionEntriesByIds(ctx context.Context, ids []ordinals.InscriptionId) (map[ordinals.InscriptionId]*ordinals.InscriptionEntry, error) {
// try to get from cache if exists
result := make(map[ordinals.InscriptionId]*ordinals.InscriptionEntry)
idsToFetch := make([]ordinals.InscriptionId, 0)
for _, id := range ids {
if inscriptionEntry, ok := p.newInscriptionEntryStates[id]; ok {
result[id] = inscriptionEntry
} else {
idsToFetch = append(idsToFetch, id)
}
}
if len(idsToFetch) > 0 {
inscriptions, err := p.brc20Dg.GetInscriptionEntriesByIds(ctx, idsToFetch)
if err != nil {
return nil, errors.Wrap(err, "failed to get inscription entries by ids")
}
for id, inscription := range inscriptions {
result[id] = inscription
}
}
return result, nil
}
func (p *Processor) getBlockSubsidy(blockHeight uint64) uint64 {
return uint64(blockchain.CalcBlockSubsidy(int32(blockHeight), p.network.ChainParams()))
}

View File

@@ -17,6 +17,9 @@ import (
// Process implements indexer.Processor.
func (p *Processor) Process(ctx context.Context, blocks []*types.Block) error {
for _, block := range blocks {
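// NOTE: hard-coded stop; processing panics once block height 779832 is reached (appears to be a debug guard)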
if block.Header.Height == 779832 {
panic("stop")
}
ctx = logger.WithContext(ctx, slogx.Uint64("height", uint64(block.Header.Height)))
logger.DebugContext(ctx, "Processing new block")
p.blockReward = p.getBlockSubsidy(uint64(block.Header.Height))