Merge branch 'swap'

jiedo committed 2024-05-15 10:08:45 +08:00
45 changed files with 8490 additions and 581 deletions


@@ -3,38 +3,70 @@ package main
import (
"flag"
"log"
"os"
"strconv"
"github.com/btcsuite/btcd/chaincfg"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/indexer"
"github.com/unisat-wallet/libbrc20-indexer/loader"
)
var (
inputfile string
outputfile string
testnet bool
inputfile string
outputfile string
outputModulefile string
testnet bool
)
func init() {
flag.BoolVar(&testnet, "testnet", false, "testnet")
flag.StringVar(&inputfile, "input", "./data/brc20.input.txt", "the filename of input data, default(./data/brc20.input.txt)")
flag.StringVar(&outputfile, "output", "./data/brc20.output.txt", "the filename of output result, default(./data/brc20.output.txt)")
flag.StringVar(&outputfile, "output", "./data/brc20.output.txt", "the filename of output data, default(./data/brc20.output.txt)")
flag.StringVar(&outputModulefile, "output_module", "./data/module.output.txt", "the filename of output data, default(./data/module.output.txt)")
flag.Parse()
if testnet {
conf.GlobalNetParams = &chaincfg.TestNet3Params
}
if ticks := os.Getenv("TICKS_ENABLED"); ticks != "" {
conf.TICKS_ENABLED = ticks
}
if id := os.Getenv("MODULE_SWAP_SOURCE_INSCRIPTION_ID"); id != "" {
conf.MODULE_SWAP_SOURCE_INSCRIPTION_ID = id
}
if heightStr := os.Getenv("BRC20_ENABLE_SELF_MINT_HEIGHT"); heightStr != "" {
if h, err := strconv.Atoi(heightStr); err == nil {
conf.ENABLE_SELF_MINT_HEIGHT = uint32(h)
}
}
}
func main() {
brc20Datas, err := loader.LoadBRC20InputData(inputfile)
if err != nil {
log.Fatalf("invalid input, %s", err)
}
brc20Datas := make(chan interface{}, 10240)
go func() {
if err := loader.LoadBRC20InputData(inputfile, brc20Datas); err != nil {
log.Printf("invalid input, %s", err)
}
close(brc20Datas)
}()
g := &indexer.BRC20Indexer{}
g.ProcessUpdateLatestBRC20(brc20Datas)
g := &indexer.BRC20ModuleIndexer{}
g.Init()
g.ProcessUpdateLatestBRC20Loop(brc20Datas, nil)
loader.DumpTickerInfoMap(outputfile,
g.HistoryData,
g.InscriptionsTickerInfoMap,
g.UserTokensBalanceData,
g.TokenUsersBalanceData,
testnet,
)
loader.DumpModuleInfoMap(outputModulefile,
g.ModulesInfoMap,
)
}
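
The rewritten main.go streams input records through a buffered channel while the loader runs in a goroutine, instead of loading everything into memory first. A minimal, self-contained sketch of that producer/consumer pattern (the record type and values are illustrative, not code from this commit):

package main

import "fmt"

// record stands in for model.InscriptionBRC20Data; it is illustrative only.
type record struct{ height uint32 }

func main() {
	datas := make(chan interface{}, 10240)

	// producer: push records, then close the channel once the source is drained
	go func() {
		for h := uint32(0); h < 3; h++ {
			datas <- &record{height: h}
		}
		close(datas)
	}()

	// consumer: range exits once the channel is closed and empty
	for d := range datas {
		fmt.Println(d.(*record).height)
	}
}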

conf/conf.go (new file)

@@ -0,0 +1,13 @@
package conf
import (
"github.com/btcsuite/btcd/chaincfg"
)
var (
DEBUG = false
MODULE_SWAP_SOURCE_INSCRIPTION_ID = "d2a30f6131324e06b1366876c8c089d7ad2a9c2b0ea971c5b0dc6198615bda2ei0"
GlobalNetParams = &chaincfg.MainNetParams
TICKS_ENABLED = ""
ENABLE_SELF_MINT_HEIGHT uint32 = 837090
)


@@ -1,8 +1,12 @@
package constant
const MEMPOOL_HEIGHT = 0x3fffff // 3fffff, 2^22-1
// brc20 protocol
const (
BRC20_P = "brc-20"
BRC20_P = "brc-20"
BRC20_P_MODULE = "brc20-module"
BRC20_P_SWAP = "brc20-swap"
)
// brc20 op
@@ -12,12 +16,6 @@ const (
BRC20_OP_TRANSFER = "transfer"
)
const (
BRC20_OP_N_DEPLOY = 0
BRC20_OP_N_MINT = 1
BRC20_OP_N_TRANSFER = 2
)
// brc20 history
const (
BRC20_HISTORY_TYPE_INSCRIBE_DEPLOY = "inscribe-deploy"
@@ -29,22 +27,61 @@ const (
)
const (
// brc20 history N
BRC20_HISTORY_TYPE_N_INSCRIBE_DEPLOY uint8 = 0
BRC20_HISTORY_TYPE_N_INSCRIBE_MINT uint8 = 1
BRC20_HISTORY_TYPE_N_INSCRIBE_TRANSFER uint8 = 2
BRC20_HISTORY_TYPE_N_TRANSFER uint8 = 3
BRC20_HISTORY_TYPE_N_SEND uint8 = 4
BRC20_HISTORY_TYPE_N_RECEIVE uint8 = 5
// swap history N
BRC20_HISTORY_MODULE_TYPE_N_INSCRIBE_MODULE uint8 = 6
BRC20_HISTORY_MODULE_TYPE_N_INSCRIBE_WITHDRAW uint8 = 7
BRC20_HISTORY_MODULE_TYPE_N_WITHDRAW_FROM uint8 = 8
BRC20_HISTORY_MODULE_TYPE_N_WITHDRAW_TO uint8 = 9
BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_APPROVE uint8 = 10
BRC20_HISTORY_SWAP_TYPE_N_APPROVE uint8 = 11
BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_CONDITIONAL_APPROVE uint8 = 12
BRC20_HISTORY_SWAP_TYPE_N_CONDITIONAL_APPROVE uint8 = 13
BRC20_HISTORY_SWAP_TYPE_N_APPROVE_FROM uint8 = 14
BRC20_HISTORY_SWAP_TYPE_N_APPROVE_TO uint8 = 15
BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_COMMIT uint8 = 16
BRC20_HISTORY_SWAP_TYPE_N_COMMIT uint8 = 17
)
var BRC20_HISTORY_TYPES_TO_N map[string]uint8 = map[string]uint8{
BRC20_HISTORY_TYPE_INSCRIBE_DEPLOY: BRC20_HISTORY_TYPE_N_INSCRIBE_DEPLOY,
BRC20_HISTORY_TYPE_INSCRIBE_MINT: BRC20_HISTORY_TYPE_N_INSCRIBE_MINT,
BRC20_HISTORY_TYPE_INSCRIBE_TRANSFER: BRC20_HISTORY_TYPE_N_INSCRIBE_TRANSFER,
BRC20_HISTORY_TYPE_TRANSFER: BRC20_HISTORY_TYPE_N_TRANSFER,
BRC20_HISTORY_TYPE_SEND: BRC20_HISTORY_TYPE_N_SEND,
BRC20_HISTORY_TYPE_RECEIVE: BRC20_HISTORY_TYPE_N_RECEIVE,
}
// module op
const (
BRC20_OP_MODULE_DEPLOY = "deploy"
BRC20_OP_MODULE_WITHDRAW = "withdraw"
BRC20_OP_SWAP_APPROVE = "approve"
BRC20_OP_SWAP_CONDITIONAL_APPROVE = "conditional-approve"
BRC20_OP_SWAP_COMMIT = "commit"
)
// swap history
const (
BRC20_HISTORY_MODULE_TYPE_INSCRIBE_MODULE = "inscribe-module"
BRC20_HISTORY_MODULE_TYPE_INSCRIBE_WITHDRAW = "inscribe-withdraw"
BRC20_HISTORY_MODULE_TYPE_WITHDRAW_FROM = "withdraw-from"
BRC20_HISTORY_MODULE_TYPE_WITHDRAW_TO = "withdraw-to"
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_APPROVE = "inscribe-approve"
BRC20_HISTORY_SWAP_TYPE_APPROVE = "approve"
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_CONDITIONAL_APPROVE = "inscribe-conditional-approve"
BRC20_HISTORY_SWAP_TYPE_CONDITIONAL_APPROVE = "conditional-approve"
BRC20_HISTORY_SWAP_TYPE_APPROVE_FROM = "approve-from"
BRC20_HISTORY_SWAP_TYPE_APPROVE_TO = "approve-to"
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_COMMIT = "inscribe-commit"
BRC20_HISTORY_SWAP_TYPE_COMMIT = "commit"
)
var BRC20_HISTORY_TYPE_NAMES []string = []string{
BRC20_HISTORY_TYPE_INSCRIBE_DEPLOY,
@@ -53,6 +90,63 @@ var BRC20_HISTORY_TYPE_NAMES []string = []string{
BRC20_HISTORY_TYPE_TRANSFER,
BRC20_HISTORY_TYPE_SEND,
BRC20_HISTORY_TYPE_RECEIVE,
// module
BRC20_HISTORY_MODULE_TYPE_INSCRIBE_MODULE,
BRC20_HISTORY_MODULE_TYPE_INSCRIBE_WITHDRAW,
BRC20_HISTORY_MODULE_TYPE_WITHDRAW_FROM,
BRC20_HISTORY_MODULE_TYPE_WITHDRAW_TO,
// swap
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_APPROVE,
BRC20_HISTORY_SWAP_TYPE_APPROVE,
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_CONDITIONAL_APPROVE,
BRC20_HISTORY_SWAP_TYPE_CONDITIONAL_APPROVE,
BRC20_HISTORY_SWAP_TYPE_APPROVE_FROM,
BRC20_HISTORY_SWAP_TYPE_APPROVE_TO,
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_COMMIT,
BRC20_HISTORY_SWAP_TYPE_COMMIT,
}
var DEFAULT_DECIMAL_18 = "18"
var BRC20_HISTORY_TYPES_TO_N map[string]uint8 = map[string]uint8{
BRC20_HISTORY_TYPE_INSCRIBE_DEPLOY: BRC20_HISTORY_TYPE_N_INSCRIBE_DEPLOY,
BRC20_HISTORY_TYPE_INSCRIBE_MINT: BRC20_HISTORY_TYPE_N_INSCRIBE_MINT,
BRC20_HISTORY_TYPE_INSCRIBE_TRANSFER: BRC20_HISTORY_TYPE_N_INSCRIBE_TRANSFER,
BRC20_HISTORY_TYPE_TRANSFER: BRC20_HISTORY_TYPE_N_TRANSFER,
BRC20_HISTORY_TYPE_SEND: BRC20_HISTORY_TYPE_N_SEND,
BRC20_HISTORY_TYPE_RECEIVE: BRC20_HISTORY_TYPE_N_RECEIVE,
// module
BRC20_HISTORY_MODULE_TYPE_INSCRIBE_MODULE: BRC20_HISTORY_MODULE_TYPE_N_INSCRIBE_MODULE,
BRC20_HISTORY_MODULE_TYPE_INSCRIBE_WITHDRAW: BRC20_HISTORY_MODULE_TYPE_N_INSCRIBE_WITHDRAW,
BRC20_HISTORY_MODULE_TYPE_WITHDRAW_FROM: BRC20_HISTORY_MODULE_TYPE_N_WITHDRAW_FROM,
BRC20_HISTORY_MODULE_TYPE_WITHDRAW_TO: BRC20_HISTORY_MODULE_TYPE_N_WITHDRAW_TO,
// swap // swap
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_APPROVE: BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_APPROVE,
BRC20_HISTORY_SWAP_TYPE_APPROVE: BRC20_HISTORY_SWAP_TYPE_N_APPROVE,
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_CONDITIONAL_APPROVE: BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_CONDITIONAL_APPROVE,
BRC20_HISTORY_SWAP_TYPE_CONDITIONAL_APPROVE: BRC20_HISTORY_SWAP_TYPE_N_CONDITIONAL_APPROVE,
BRC20_HISTORY_SWAP_TYPE_APPROVE_FROM: BRC20_HISTORY_SWAP_TYPE_N_APPROVE_FROM,
BRC20_HISTORY_SWAP_TYPE_APPROVE_TO: BRC20_HISTORY_SWAP_TYPE_N_APPROVE_TO,
BRC20_HISTORY_SWAP_TYPE_INSCRIBE_COMMIT: BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_COMMIT,
BRC20_HISTORY_SWAP_TYPE_COMMIT: BRC20_HISTORY_SWAP_TYPE_N_COMMIT,
}
// swap function
const (
BRC20_SWAP_FUNCTION_DEPLOY_POOL = "deployPool"
BRC20_SWAP_FUNCTION_ADD_LIQ = "addLiq"
BRC20_SWAP_FUNCTION_REMOVE_LIQ = "removeLiq"
BRC20_SWAP_FUNCTION_SWAP = "swap"
BRC20_SWAP_FUNCTION_SEND = "send"
BRC20_SWAP_FUNCTION_DECREASE_APPROVAL = "decreaseApproval"
)
const ZERO_ADDRESS_PKSCRIPT = "\x6a\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
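
The new BRC20_HISTORY_TYPES_TO_N map converts a history type name to its compact numeric code, and the codes index back into the ordered BRC20_HISTORY_TYPE_NAMES slice. A small lookup sketch (illustrative only, not part of this commit):

package main

import (
	"fmt"

	"github.com/unisat-wallet/libbrc20-indexer/constant"
)

func main() {
	// name -> compact numeric code used when serializing history
	n := constant.BRC20_HISTORY_TYPES_TO_N[constant.BRC20_HISTORY_SWAP_TYPE_COMMIT]
	fmt.Println(n) // 17

	// the numeric code indexes back into the ordered name slice
	fmt.Println(constant.BRC20_HISTORY_TYPE_NAMES[n]) // commit
}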


@@ -10,43 +10,69 @@ import (
const MAX_PRECISION = 18
var precisionFactor = new(big.Int).Exp(big.NewInt(10), big.NewInt(MAX_PRECISION), nil)
var MAX_PRECISION_STRING = "18"
var precisionFactor [19]*big.Int = [19]*big.Int{
new(big.Int).Exp(big.NewInt(10), big.NewInt(0), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(1), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(2), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(3), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(4), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(5), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(6), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(7), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(8), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(9), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(10), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(11), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(12), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(13), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(14), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(15), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(16), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(17), nil),
new(big.Int).Exp(big.NewInt(10), big.NewInt(18), nil),
}
// Decimal represents a fixed-point decimal number with 18 decimal places
type Decimal struct {
Value *big.Int
Precition uint
Value *big.Int
}
func NewDecimal() *Decimal {
return &Decimal{Value: new(big.Int).SetUint64(0)}
func NewDecimal(v uint64, p uint) *Decimal {
if p > MAX_PRECISION {
p = MAX_PRECISION
}
return &Decimal{Precition: p, Value: new(big.Int).SetUint64(v)}
}
func NewDecimalCopy(other *Decimal) *Decimal {
if other == nil {
return nil
}
return &Decimal{Value: new(big.Int).Set(other.Value)}
return &Decimal{Precition: other.Precition, Value: new(big.Int).Set(other.Value)}
}
// NewDecimalFromString creates a Decimal instance from a string
func NewDecimalFromString(s string) (*Decimal, int, error) {
func NewDecimalFromString(s string, maxPrecision int) (*Decimal, error) {
if s == "" {
return nil, 0, errors.New("empty string")
return nil, errors.New("empty string")
}
parts := strings.Split(s, ".")
if len(parts) > 2 {
return nil, 0, fmt.Errorf("invalid decimal format: %s", s)
return nil, fmt.Errorf("invalid decimal format: %s", s)
}
integerPartStr := parts[0]
if integerPartStr == "" || integerPartStr[0] == '+' {
return nil, 0, errors.New("empty integer")
return nil, errors.New("empty integer")
}
integerPart, ok := new(big.Int).SetString(parts[0], 10)
if !ok {
return nil, 0, fmt.Errorf("invalid integer format: %s", parts[0])
return nil, fmt.Errorf("invalid integer format: %s", parts[0])
}
currPrecision := 0
@@ -54,31 +80,31 @@ func NewDecimalFromString(s string) (*Decimal, int, error) {
if len(parts) == 2 {
decimalPartStr := parts[1]
if decimalPartStr == "" || decimalPartStr[0] == '-' || decimalPartStr[0] == '+' {
return nil, 0, errors.New("empty decimal")
return nil, errors.New("empty decimal")
}
currPrecision = len(decimalPartStr)
if currPrecision > MAX_PRECISION {
return nil, 0, fmt.Errorf("decimal exceeds maximum precision: %s", s)
if currPrecision > maxPrecision {
return nil, fmt.Errorf("decimal exceeds maximum precision: %s", s)
}
n := MAX_PRECISION - currPrecision
n := maxPrecision - currPrecision
for i := 0; i < n; i++ {
decimalPartStr += "0"
}
decimalPart, ok = new(big.Int).SetString(decimalPartStr, 10)
if !ok || decimalPart.Sign() < 0 {
return nil, 0, fmt.Errorf("invalid decimal format: %s", parts[0])
return nil, fmt.Errorf("invalid decimal format: %s", parts[0])
}
}
value := new(big.Int).Mul(integerPart, precisionFactor)
value := new(big.Int).Mul(integerPart, precisionFactor[maxPrecision])
if value.Sign() < 0 {
value = value.Sub(value, decimalPart)
} else {
value = value.Add(value, decimalPart)
}
return &Decimal{Value: value}, currPrecision, nil
return &Decimal{Precition: uint(maxPrecision), Value: value}, nil
}
// String returns the string representation of a Decimal instance
@@ -87,7 +113,7 @@ func (d *Decimal) String() string {
return "0"
}
value := new(big.Int).Abs(d.Value)
quotient, remainder := new(big.Int).QuoRem(value, precisionFactor, new(big.Int))
quotient, remainder := new(big.Int).QuoRem(value, precisionFactor[d.Precition], new(big.Int))
sign := ""
if d.Value.Sign() < 0 {
sign = "-"
@@ -95,7 +121,7 @@ func (d *Decimal) String() string {
if remainder.Sign() == 0 {
return fmt.Sprintf("%s%s", sign, quotient.String())
}
decimalPart := fmt.Sprintf("%0*d", MAX_PRECISION, remainder)
decimalPart := fmt.Sprintf("%0*d", d.Precition, remainder)
decimalPart = strings.TrimRight(decimalPart, "0")
return fmt.Sprintf("%s%s.%s", sign, quotient.String(), decimalPart)
}
@@ -103,40 +129,90 @@ func (d *Decimal) String() string {
// Add adds two Decimal instances and returns a new Decimal instance
func (d *Decimal) Add(other *Decimal) *Decimal {
if d == nil && other == nil {
value := new(big.Int).SetUint64(0)
return &Decimal{Value: value}
return nil
}
if other == nil {
value := new(big.Int).Set(d.Value)
return &Decimal{Value: value}
return &Decimal{Precition: d.Precition, Value: value}
}
if d == nil {
value := new(big.Int).Set(other.Value)
return &Decimal{Value: value}
return &Decimal{Precition: other.Precition, Value: value}
}
if d.Precition != other.Precition {
panic("precition not match")
}
value := new(big.Int).Add(d.Value, other.Value)
return &Decimal{Value: value}
return &Decimal{Precition: d.Precition, Value: value}
}
// Sub subtracts two Decimal instances and returns a new Decimal instance
func (d *Decimal) Sub(other *Decimal) *Decimal {
if d == nil && other == nil {
value := new(big.Int).SetUint64(0)
return &Decimal{Value: value}
return nil
}
if other == nil {
value := new(big.Int).Set(d.Value)
return &Decimal{Value: value}
return &Decimal{Precition: d.Precition, Value: value}
}
if d == nil {
value := new(big.Int).Neg(other.Value)
return &Decimal{Value: value}
return &Decimal{Precition: other.Precition, Value: value}
}
if d.Precition != other.Precition {
panic(fmt.Sprintf("precition not match, (%d != %d)", d.Precition, other.Precition))
}
value := new(big.Int).Sub(d.Value, other.Value)
return &Decimal{Value: value}
return &Decimal{Precition: d.Precition, Value: value}
}
// Mul multiplies two Decimal instances and returns a new Decimal instance
func (d *Decimal) Mul(other *Decimal) *Decimal {
if d == nil || other == nil {
return nil
}
value := new(big.Int).Mul(d.Value, other.Value)
// value := new(big.Int).Div(value0, precisionFactor[other.Precition])
return &Decimal{Precition: d.Precition, Value: value}
}
// Sqrt returns the square root of a Decimal instance as a new Decimal instance
func (d *Decimal) Sqrt() *Decimal {
if d == nil {
return nil
}
// value0 := new(big.Int).Mul(d.Value, precisionFactor[d.Precition])
value := new(big.Int).Sqrt(d.Value)
return &Decimal{Precition: MAX_PRECISION, Value: value}
}
// Div divides two Decimal instances and returns a new Decimal instance
func (d *Decimal) Div(other *Decimal) *Decimal {
if d == nil || other == nil {
return nil
}
// value0 := new(big.Int).Mul(d.Value, precisionFactor[other.Precition])
value := new(big.Int).Div(d.Value, other.Value)
return &Decimal{Precition: d.Precition, Value: value}
}
func (d *Decimal) Cmp(other *Decimal) int {
if d == nil && other == nil {
return 0
}
if other == nil {
return d.Value.Sign()
}
if d == nil {
return -other.Value.Sign()
}
if d.Precition != other.Precition {
panic(fmt.Sprintf("precition not match, (%d != %d)", d.Precition, other.Precition))
}
return d.Value.Cmp(other.Value)
}
func (d *Decimal) CmpAlign(other *Decimal) int {
if d == nil && other == nil {
return 0
}
@@ -162,19 +238,28 @@ func (d *Decimal) IsOverflowUint64() bool {
}
integerPart := new(big.Int).SetUint64(math.MaxUint64)
value := new(big.Int).Mul(integerPart, precisionFactor)
value := new(big.Int).Mul(integerPart, precisionFactor[d.Precition])
if d.Value.Cmp(value) > 0 {
return true
}
return false
}
func (d *Decimal) GetMaxUint64() *Decimal {
if d == nil {
return nil
}
integerPart := new(big.Int).SetUint64(math.MaxUint64)
value := new(big.Int).Mul(integerPart, precisionFactor[d.Precition])
return &Decimal{Precition: d.Precition, Value: value}
}
func (d *Decimal) Float64() float64 {
if d == nil {
return 0
}
value := new(big.Int).Abs(d.Value)
quotient, remainder := new(big.Int).QuoRem(value, precisionFactor, new(big.Int))
quotient, remainder := new(big.Int).QuoRem(value, precisionFactor[d.Precition], new(big.Int))
f := float64(quotient.Uint64()) + float64(remainder.Uint64())/math.MaxFloat64
if d.Value.Sign() < 0 {
return -f


@@ -12,12 +12,18 @@ func TestNewDecimalFromString(t *testing.T) {
want string
err bool
}{
// valid
{"123456789.123456789", "123456789.123456789", false},
{"123456789.123", "123456789.123", false},
{"123456789", "123456789", false},
{"-123456789.123456789", "-123456789.123456789", false},
{"-123456789.123", "-123456789.123", false},
{"-123456789", "-123456789", false},
{"000001", "1", false},
{"000001.1", "1.1", false},
{"000001.100000000000000000", "1.1", false},
// invalid
{"", "", true},
{" ", "", true},
{".", "", true},
@@ -35,11 +41,12 @@ func TestNewDecimalFromString(t *testing.T) {
{"123.456.789", "", true},
{"123456789.", "123456789", true},
{"123456789.12345678901234567891", "", true},
{"0.1000000000000000000", "", true},
}
for _, tc := range testCases {
t.Run(tc.input, func(t *testing.T) {
got, _, err := decimal.NewDecimalFromString(tc.input)
got, err := decimal.NewDecimalFromString(tc.input, 18)
if (err != nil) != tc.err {
t.Fatalf("unexpected error: %v", err)
}
@@ -65,8 +72,8 @@ func TestDecimal_Add(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.a+"+"+tc.b, func(t *testing.T) {
da, _, _ := decimal.NewDecimalFromString(tc.a)
db, _, _ := decimal.NewDecimalFromString(tc.b)
da, _ := decimal.NewDecimalFromString(tc.a, 18)
db, _ := decimal.NewDecimalFromString(tc.b, 18)
got := da.Add(db)
if got.String() != tc.want {
t.Errorf("got %s, want %s", got.String(), tc.want)
@@ -90,8 +97,8 @@ func TestDecimal_Sub(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.a+"-"+tc.b, func(t *testing.T) {
da, _, _ := decimal.NewDecimalFromString(tc.a)
db, _, _ := decimal.NewDecimalFromString(tc.b)
da, _ := decimal.NewDecimalFromString(tc.a, 18)
db, _ := decimal.NewDecimalFromString(tc.b, 18)
got := da.Sub(db)
if got.String() != tc.want {
t.Errorf("got %s, want %s", got.String(), tc.want)
@@ -117,7 +124,7 @@ func TestDecimal_String(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.input, func(t *testing.T) {
d, _, _ := decimal.NewDecimalFromString(tc.input)
d, _ := decimal.NewDecimalFromString(tc.input, 18)
got := d.String()
if got != tc.want {
t.Errorf("got %s, want %s", got, tc.want)
@@ -127,16 +134,16 @@ func TestDecimal_String(t *testing.T) {
}
func BenchmarkAdd(b *testing.B) {
d1, _, _ := decimal.NewDecimalFromString("123456789.123456789")
d2, _, _ := decimal.NewDecimalFromString("987654321.987654321")
d1, _ := decimal.NewDecimalFromString("123456789.123456789", 18)
d2, _ := decimal.NewDecimalFromString("987654321.987654321", 18)
for n := 0; n < b.N; n++ {
d1.Add(d2)
}
}
func BenchmarkSub(b *testing.B) {
d1, _, _ := decimal.NewDecimalFromString("123456789.123456789")
d2, _, _ := decimal.NewDecimalFromString("987654321.987654321")
d1, _ := decimal.NewDecimalFromString("123456789.123456789", 18)
d2, _ := decimal.NewDecimalFromString("987654321.987654321", 18)
for n := 0; n < b.N; n++ {
d1.Sub(d2)
}

event/event.go (new file)

@@ -0,0 +1,235 @@
package event
import (
"encoding/hex"
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
"regexp"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func InitTickDataFromFile(fname string) (brc20Datas []*model.InscriptionBRC20Data, err error) {
// Open our jsonFile
jsonFile, err := os.Open(fname)
// if os.Open returns an error then handle it
if err != nil {
fmt.Println(err)
return nil, err
}
// defer the closing of our jsonFile so that we can parse it later on
defer jsonFile.Close()
byteValue, err := ioutil.ReadAll(jsonFile)
if err != nil {
return nil, err
}
var ticksExternal []*model.InscriptionBRC20DeployContent
err = json.Unmarshal([]byte(byteValue), &ticksExternal)
if err != nil {
return nil, err
}
for idx, info := range ticksExternal {
var data model.InscriptionBRC20Data
data.TxId = fmt.Sprintf("10%030x", idx)
data.Idx = 0
data.Vout = 0
data.Offset = 0
data.Satoshi = 330
data.InscriptionId = fmt.Sprintf("10%030xi0", idx)
data.InscriptionNumber = 100 + int64(idx)
data.Height = 1
data.TxIdx = uint32(idx)
data.BlockTime = 100
var key model.NFTCreateIdxKey = model.NFTCreateIdxKey{
Height: data.Height,
IdxInBlock: uint64(idx), // fake idx
}
data.CreateIdxKey = key.String()
data.IsTransfer = false
data.ContentBody, _ = json.Marshal(info)
data.Sequence = 0
brc20Datas = append(brc20Datas, &data)
}
return brc20Datas, nil
}
func GenerateBRC20InputDataFromEvents(fname string) (brc20Datas []*model.InscriptionBRC20Data, err error) {
// Open our jsonFile
jsonFile, err := os.Open(fname)
// if os.Open returns an error then handle it
if err != nil {
fmt.Println(err)
return nil, err
}
// defer the closing of our jsonFile so that we can parse it later on
defer jsonFile.Close()
byteValue, err := ioutil.ReadAll(jsonFile)
if err != nil {
return nil, err
}
var events []*model.BRC20ModuleHistoryInfoEvent
err = json.Unmarshal([]byte(byteValue), &events)
if err != nil {
return nil, err
}
for idx, e := range events {
var data model.InscriptionBRC20Data
txid, _ := hex.DecodeString(e.TxIdHex)
data.TxId = string(txid)
data.Idx = e.Idx
data.Vout = e.Vout
data.Offset = e.Offset
data.Satoshi = e.Satoshi
data.InscriptionId = e.InscriptionId // preset cache
data.InscriptionNumber = e.InscriptionNumber
data.Height = e.Height
data.TxIdx = e.TxIdx
data.BlockTime = e.BlockTime
var key model.NFTCreateIdxKey = model.NFTCreateIdxKey{
Height: data.Height,
IdxInBlock: uint64(idx), // fake idx
}
data.CreateIdxKey = key.String()
var pkScriptFrom, pkScriptTo string
if pk, err := utils.GetPkScriptByAddress(e.AddressFrom, conf.GlobalNetParams); err != nil {
log.Printf("GenerateBRC20InputDataFromEvents [%d] pk invalid: %s", idx, err)
} else {
pkScriptFrom = string(pk)
}
if pk, err := utils.GetPkScriptByAddress(e.AddressTo, conf.GlobalNetParams); err != nil {
pk, _ := hex.DecodeString(e.AddressTo)
pkScriptTo = string(pk)
} else {
pkScriptTo = string(pk)
}
data.PkScript = pkScriptTo
var inscribe model.InscriptionBRC20Data
inscribe = data
inscribe.IsTransfer = false
inscribe.ContentBody = []byte(e.ContentBody)
inscribe.Sequence = 0
if e.Type == "transfer" {
// mint
var mint model.InscriptionBRC20Data
mint = data
mint.ContentBody = []byte(strings.Replace(e.ContentBody, "transfer", "mint", 1))
mint.Sequence = 0
mint.PkScript = pkScriptFrom
brc20Datas = append(brc20Datas, &mint)
// transfer
inscribe.PkScript = pkScriptFrom
brc20Datas = append(brc20Datas, &inscribe)
// transfer send
data.IsTransfer = true
data.Sequence = 1
data.PkScript = pkScriptTo
brc20Datas = append(brc20Datas, &data)
} else if e.Type == "commit" {
inscribe.PkScript = pkScriptFrom
brc20Datas = append(brc20Datas, &inscribe)
// commit send
data.IsTransfer = true
data.Sequence = 1
brc20Datas = append(brc20Datas, &data)
} else if e.Type == "inscribe-module" {
inscribe.PkScript = pkScriptTo
brc20Datas = append(brc20Datas, &inscribe)
} else if e.Type == "inscribe-conditional-approve" {
inscribe.PkScript = pkScriptTo
brc20Datas = append(brc20Datas, &inscribe)
// send to delegator
data.IsTransfer = true
data.Sequence = 1
data.PkScript = constant.ZERO_ADDRESS_PKSCRIPT
brc20Datas = append(brc20Datas, &data)
} else if e.Type == "conditional-approve" {
content := strings.Replace(e.ContentBody, "brc20-swap", "brc-20", 1)
// fixme: always using mint may fail
// mint
var mint model.InscriptionBRC20Data
mint = data
// e.Data.Amount fixme: replace e.Data.Amount with
m1 := regexp.MustCompile(`"amt" *: *"[0-9\.]+"`)
content = m1.ReplaceAllString(content, fmt.Sprintf(`"amt":"%s"`, e.Data.Amount))
mintContent := strings.Replace(content, "conditional-approve", "mint", 1)
mint.ContentBody = []byte(mintContent)
mint.IsTransfer = false
mint.Sequence = 0
mint.PkScript = pkScriptTo
brc20Datas = append(brc20Datas, &mint)
// transfer
var transfer model.InscriptionBRC20Data
transfer = data
transferContent := strings.Replace(content, "conditional-approve", "transfer", 1)
transfer.ContentBody = []byte(transferContent)
transfer.IsTransfer = false
transfer.Sequence = 0
transfer.PkScript = pkScriptTo
brc20Datas = append(brc20Datas, &transfer)
// transfer send
var send model.InscriptionBRC20Data
send = data
send.IsTransfer = true
send.Sequence = 1
send.PkScript = pkScriptFrom
brc20Datas = append(brc20Datas, &send)
// send to delegator
data.IsTransfer = true
data.Sequence = 2
data.PkScript = constant.ZERO_ADDRESS_PKSCRIPT
brc20Datas = append(brc20Datas, &data)
} else {
log.Printf("GenerateBRC20InputDataFromEvents [%d] op invalid: %s", idx, e.Type)
// fixme: inscribe-conditional-approve / conditional-approve
}
}
return brc20Datas, nil
}
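
GenerateBRC20InputDataFromEvents replays exported module/swap history events as synthetic inscription data. A sketch of loading such a file and handing the records to a channel of the kind the indexer loop consumes (the file path is hypothetical):

package main

import (
	"log"

	"github.com/unisat-wallet/libbrc20-indexer/event"
)

func main() {
	// replay previously exported module/swap history events as inscription data
	datas, err := event.GenerateBRC20InputDataFromEvents("./data/events.json")
	if err != nil {
		log.Fatalf("load events: %s", err)
	}

	// hand them over through the same channel type the indexer loop consumes
	ch := make(chan interface{}, len(datas))
	for _, d := range datas {
		ch <- d
	}
	close(ch)
	log.Printf("replayed %d records", len(datas))
}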

go.mod

@@ -4,7 +4,7 @@ go 1.18
require (
github.com/btcsuite/btcd v0.23.4 // indirect
github.com/btcsuite/btcd/btcec/v2 v2.1.3 // indirect
github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect
github.com/btcsuite/btcd/btcutil v1.1.0 // indirect
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 // indirect
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect

go.sum

@@ -6,6 +6,8 @@ github.com/btcsuite/btcd v0.23.4/go.mod h1:0QJIIN1wwIXF/3G/m87gIwGniDMDQqjVn4SZg
github.com/btcsuite/btcd/btcec/v2 v2.1.0/go.mod h1:2VzYrv4Gm4apmbVVsSq5bqf1Ec8v56E48Vt0Y/umPgA=
github.com/btcsuite/btcd/btcec/v2 v2.1.3 h1:xM/n3yIhHAhHy04z4i43C8p4ehixJZMsnrVJkgl+MTE=
github.com/btcsuite/btcd/btcec/v2 v2.1.3/go.mod h1:ctjw4H1kknNJmRN4iP1R7bTQ+v3GJkZBd6mui8ZsAZE=
github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U=
github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04=
github.com/btcsuite/btcd/btcutil v1.0.0/go.mod h1:Uoxwv0pqYWhD//tfTiipkxNfdhG9UrLwaeswfjfdF0A=
github.com/btcsuite/btcd/btcutil v1.1.0 h1:MO4klnGY+EWJdoWF12Wkuf4AWDBPMpZNeN/jRLrklUU=
github.com/btcsuite/btcd/btcutil v1.1.0/go.mod h1:5OapHB7A2hBBWLm48mmw4MOHNJCcUBTwmWH/0Jn8VHE=


@@ -1,75 +1,133 @@
package indexer
import (
"errors"
"log"
"strconv"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20Indexer) ProcessDeploy(progress int, data *model.InscriptionBRC20Data, body *model.InscriptionBRC20Content) {
func (g *BRC20ModuleIndexer) ProcessDeploy(data *model.InscriptionBRC20Data) error {
body := new(model.InscriptionBRC20DeployContent)
if err := body.Unmarshal(data.ContentBody); err != nil {
return nil
}
// check tick
uniqueLowerTicker := strings.ToLower(body.BRC20Tick)
uniqueLowerTicker, err := utils.GetValidUniqueLowerTickerTicker(body.BRC20Tick)
if err != nil {
return nil
// return errors.New("deploy, tick length not 4 or 5")
}
if len(body.BRC20Tick) == 5 {
if body.BRC20SelfMint != "true" {
return nil
// return errors.New("deploy, tick length 5, but not self_mint")
}
if data.Height < conf.ENABLE_SELF_MINT_HEIGHT {
return nil
// return errors.New("deploy, tick length 5, but not enabled")
}
}
// tick enabled, fixme: test only, does not support space in ticker
if conf.TICKS_ENABLED != "" {
if strings.Contains(uniqueLowerTicker, " ") {
return nil
}
if !strings.Contains(conf.TICKS_ENABLED, uniqueLowerTicker) {
return nil
}
}
if _, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]; ok { // dup ticker
return
return nil
// return errors.New("deploy, but tick exist")
}
if body.BRC20Max == "" { // without max
log.Printf("(%d%%) ProcessBRC20Deploy, but max missing. ticker: %s",
progress,
log.Printf("deploy, but max missing. ticker: %s",
uniqueLowerTicker,
)
return
return errors.New("deploy, but max missing")
}
tinfo := model.NewInscriptionBRC20TickDeployInfo(body, data)
tinfo := model.NewInscriptionBRC20TickInfo(body.BRC20Tick, body.Operation, data)
tinfo.Data.BRC20Max = body.BRC20Max
tinfo.Data.BRC20Limit = body.BRC20Limit
tinfo.Data.BRC20Decimal = body.BRC20Decimal
tinfo.Data.BRC20Minted = "0"
tinfo.InscriptionNumberStart = data.InscriptionNumber
if len(body.BRC20Tick) == 5 && body.BRC20SelfMint == "true" {
tinfo.SelfMint = true
tinfo.Data.BRC20SelfMint = "true"
}
// dec
if dec, err := strconv.ParseUint(body.BRC20Decimal, 10, 64); err != nil || dec > 18 {
if dec, err := strconv.ParseUint(tinfo.Data.BRC20Decimal, 10, 64); err != nil || dec > 18 {
// dec invalid
log.Printf("(%d%%) ProcessBRC20Deploy, but dec invalid. ticker: %s, dec: %s",
progress,
log.Printf("deploy, but dec invalid. ticker: %s, dec: %s",
uniqueLowerTicker,
body.BRC20Decimal,
tinfo.Data.BRC20Decimal,
)
return
return errors.New("deploy, but dec invalid")
} else {
tinfo.Decimal = uint8(dec)
}
// max
if max, precision, err := decimal.NewDecimalFromString(body.BRC20Max); err != nil {
if max, err := decimal.NewDecimalFromString(body.BRC20Max, int(tinfo.Decimal)); err != nil {
// max invalid
log.Printf("(%d%%) ProcessBRC20Deploy, but max invalid. ticker: %s, max: '%s'",
progress,
log.Printf("deploy, but max invalid. ticker: %s, max: '%s'",
uniqueLowerTicker,
body.BRC20Max,
)
return
return errors.New("deploy, but max invalid")
} else {
if max.Sign() <= 0 || max.IsOverflowUint64() || precision > int(tinfo.Decimal) {
return
if max.Sign() < 0 || max.IsOverflowUint64() {
return nil
// return errors.New("deploy, but max invalid (range)")
}
if max.Sign() == 0 {
if tinfo.SelfMint {
tinfo.Max = max.GetMaxUint64()
} else {
return errors.New("deploy, but max invalid (0)")
}
} else {
tinfo.Max = max
}
tinfo.Max = max
}
// lim
if lim, precision, err := decimal.NewDecimalFromString(body.BRC20Limit); err != nil {
if lim, err := decimal.NewDecimalFromString(tinfo.Data.BRC20Limit, int(tinfo.Decimal)); err != nil {
// limit invalid
log.Printf("(%d%%) ProcessBRC20Deploy, but limit invalid. ticker: %s, limit: '%s'",
progress,
log.Printf("deploy, but limit invalid. ticker: %s, limit: '%s'",
uniqueLowerTicker,
body.BRC20Limit,
tinfo.Data.BRC20Limit,
)
return
return errors.New("deploy, but lim invalid")
} else {
if lim.Sign() <= 0 || lim.IsOverflowUint64() || precision > int(tinfo.Decimal) {
return
if lim.Sign() < 0 || lim.IsOverflowUint64() {
return errors.New("deploy, but lim invalid (range)")
}
if lim.Sign() == 0 {
if tinfo.SelfMint {
tinfo.Limit = lim.GetMaxUint64()
} else {
return errors.New("deploy, but lim invalid (0)")
}
} else {
tinfo.Limit = lim
}
tinfo.Limit = lim
}
tokenInfo := &model.BRC20TokenInfo{Ticker: body.BRC20Tick, Deploy: tinfo}
@@ -77,10 +135,19 @@ func (g *BRC20Indexer) ProcessDeploy(progress int, data *model.InscriptionBRC20D
tokenBalance := &model.BRC20TokenBalance{Ticker: body.BRC20Tick, PkScript: data.PkScript}
history := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_DEPLOY, true, false, &tinfo.InscriptionBRC20TickInfo, nil, data)
tokenBalance.History = append(tokenBalance.History, history)
tokenInfo.History = append(tokenInfo.History, history)
if g.EnableHistory {
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_DEPLOY, true, false, tinfo, nil, data)
history := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
tokenBalance.History = append(tokenBalance.History, history)
tokenInfo.History = append(tokenInfo.History, history)
// user history
userHistory := g.GetBRC20HistoryByUser(string(data.PkScript))
userHistory.History = append(userHistory.History, history)
// all history
g.AllHistory = append(g.AllHistory, history)
}
// init user tokens
var userTokens map[string]*model.BRC20TokenBalance
if tokens, ok := g.UserTokensBalanceData[string(data.PkScript)]; !ok {
@@ -96,5 +163,6 @@ func (g *BRC20Indexer) ProcessDeploy(progress int, data *model.InscriptionBRC20D
tokenUsers[string(data.PkScript)] = tokenBalance
g.TokenUsersBalanceData[uniqueLowerTicker] = tokenUsers
g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = &tinfo.InscriptionBRC20TickInfo
g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = tinfo.Data
return nil
}


@@ -1,33 +1,47 @@
package indexer
import (
"strings"
"errors"
"fmt"
"time"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20Indexer) ProcessMint(progress int, data *model.InscriptionBRC20Data, body *model.InscriptionBRC20Content) {
func (g *BRC20ModuleIndexer) ProcessMint(data *model.InscriptionBRC20Data) error {
body := new(model.InscriptionBRC20MintTransferContent)
if err := body.Unmarshal(data.ContentBody); err != nil {
return nil
}
// check tick
uniqueLowerTicker := strings.ToLower(body.BRC20Tick)
uniqueLowerTicker, err := utils.GetValidUniqueLowerTickerTicker(body.BRC20Tick)
if err != nil {
return nil
// return errors.New("mint, tick length not 4 or 5")
}
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
return
return nil
// return errors.New(fmt.Sprintf("mint %s, but tick not exist", body.BRC20Tick))
}
tinfo := tokenInfo.Deploy
if tinfo.SelfMint {
if utils.DecodeInscriptionFromBin(data.Parent) != tinfo.GetInscriptionId() {
return errors.New(fmt.Sprintf("self mint %s, but parent invalid", body.BRC20Tick))
}
}
// check mint amount
amt, precision, err := decimal.NewDecimalFromString(body.BRC20Amount)
amt, err := decimal.NewDecimalFromString(body.BRC20Amount, int(tinfo.Decimal))
if err != nil {
return
}
if precision > int(tinfo.Decimal) {
return
return errors.New(fmt.Sprintf("mint %s, but invalid amount(%s)", body.BRC20Tick, body.BRC20Amount))
}
if amt.Sign() <= 0 || amt.Cmp(tinfo.Limit) > 0 {
return
return errors.New(fmt.Sprintf("mint %s, invalid amount(%s), limit(%s)", body.BRC20Tick, body.BRC20Amount, tinfo.Limit))
}
// get user's tokens to update
@@ -43,26 +57,31 @@ func (g *BRC20Indexer) ProcessMint(progress int, data *model.InscriptionBRC20Dat
if token, ok := userTokens[uniqueLowerTicker]; !ok {
tokenBalance = &model.BRC20TokenBalance{Ticker: tokenInfo.Ticker, PkScript: data.PkScript}
userTokens[uniqueLowerTicker] = tokenBalance
// init token's users
tokenUsers := g.TokenUsersBalanceData[uniqueLowerTicker]
tokenUsers[string(data.PkScript)] = tokenBalance
} else {
tokenBalance = token
}
// init token's users
tokenUsers := g.TokenUsersBalanceData[uniqueLowerTicker]
tokenUsers[string(data.PkScript)] = tokenBalance
body.BRC20Tick = tokenInfo.Ticker
mintInfo := model.NewInscriptionBRC20TickMintInfo(body, data)
mintInfo := model.NewInscriptionBRC20TickInfo(body.BRC20Tick, body.Operation, data)
mintInfo.Data.BRC20Amount = body.BRC20Amount
mintInfo.Data.BRC20Minted = amt.String()
mintInfo.Decimal = tinfo.Decimal
mintInfo.Amount = amt
if tinfo.TotalMinted.Cmp(tinfo.Max) >= 0 {
// invalid history
history := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_MINT, false, false, &mintInfo.InscriptionBRC20TickInfo, tokenBalance, data)
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryMint = append(tokenBalance.HistoryMint, history)
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryMint = append(tokenInfo.HistoryMint, history)
return
if g.EnableHistory {
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_MINT, false, false, mintInfo, tokenBalance, data)
history := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryMint = append(tokenBalance.HistoryMint, history)
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryMint = append(tokenInfo.HistoryMint, history)
}
return errors.New(fmt.Sprintf("mint %s, but mint out", body.BRC20Tick))
}
// update tinfo
@@ -89,24 +108,41 @@ func (g *BRC20Indexer) ProcessMint(progress int, data *model.InscriptionBRC20Dat
}
// count
tinfo.MintTimes++
tinfo.Data.BRC20Minted = tinfo.TotalMinted.String()
// valid mint inscriptionNumber range
tinfo.InscriptionNumberEnd = data.InscriptionNumber
// update mint info
mintInfo.Data.BRC20Minted = balanceMinted.String()
mintInfo.Amount = balanceMinted
// update tokenBalance
if data.BlockTime > 0 {
tokenBalance.OverallBalanceSafe = tokenBalance.OverallBalanceSafe.Add(balanceMinted)
tokenBalance.AvailableBalanceSafe = tokenBalance.AvailableBalanceSafe.Add(balanceMinted)
}
tokenBalance.OverallBalance = tokenBalance.OverallBalance.Add(balanceMinted)
tokenBalance.AvailableBalance = tokenBalance.AvailableBalance.Add(balanceMinted)
// history
history := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_MINT, true, false, &mintInfo.InscriptionBRC20TickInfo, tokenBalance, data)
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryMint = append(tokenBalance.HistoryMint, history)
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryMint = append(tokenInfo.HistoryMint, history)
// burn
if len(data.PkScript) == 1 && data.PkScript[0] == 0x6a {
tinfo.Burned = tinfo.Burned.Add(balanceMinted)
}
g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = &mintInfo.InscriptionBRC20TickInfo
if g.EnableHistory {
// history
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_MINT, true, false, mintInfo, tokenBalance, data)
history := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
// tick history
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryMint = append(tokenBalance.HistoryMint, history)
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryMint = append(tokenInfo.HistoryMint, history)
// user address
userHistory := g.GetBRC20HistoryByUser(string(data.PkScript))
userHistory.History = append(userHistory.History, history)
// all history
g.AllHistory = append(g.AllHistory, history)
}
// g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = mintInfo.Data
return nil
}


@@ -1,6 +1,7 @@
package indexer
import (
"errors"
"log"
"strings"
@@ -10,8 +11,8 @@ import (
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20Indexer) GetTransferInfoByKey(createIdxKey string) (
transferInfo *model.InscriptionBRC20TickTransferInfo, isInvalid bool) {
func (g *BRC20ModuleIndexer) GetTransferInfoByKey(createIdxKey string) (
transferInfo *model.InscriptionBRC20TickInfo, isInvalid bool) {
var ok bool
// transfer
transferInfo, ok = g.InscriptionsValidTransferMap[createIdxKey]
@@ -20,26 +21,27 @@ func (g *BRC20Indexer) GetTransferInfoByKey(createIdxKey string) (
if !ok {
transferInfo = nil
} else {
delete(g.InscriptionsInvalidTransferMap, createIdxKey)
// don't remove. use for api valid data
// delete(g.InscriptionsInvalidTransferMap, createIdxKey)
}
isInvalid = true
} else {
delete(g.InscriptionsValidTransferMap, createIdxKey)
// don't remove. use for api valid data
// delete(g.InscriptionsValidTransferMap, createIdxKey)
}
return transferInfo, isInvalid
}
func (g *BRC20Indexer) ProcessTransfer(progress int, data *model.InscriptionBRC20Data, transferInfo *model.InscriptionBRC20TickTransferInfo, isInvalid bool) {
func (g *BRC20ModuleIndexer) ProcessTransfer(data *model.InscriptionBRC20Data, transferInfo *model.InscriptionBRC20TickInfo, isInvalid bool) error {
// ticker
uniqueLowerTicker := strings.ToLower(transferInfo.BRC20Tick)
uniqueLowerTicker := strings.ToLower(transferInfo.Tick)
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
log.Printf("(%d%%) ProcessBRC20Transfer send transfer, but ticker invalid. txid: %s",
progress,
utils.GetReversedStringHex(data.TxId),
log.Printf("ProcessBRC20Transfer send transfer, but ticker invalid. txid: %s",
utils.HashString([]byte(data.TxId)),
)
return
return errors.New("transfer, invalid ticker")
}
// to
@@ -51,46 +53,58 @@ func (g *BRC20Indexer) ProcessTransfer(progress int, data *model.InscriptionBRC2
}
// global history
history := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_TRANSFER, !isInvalid, true, &transferInfo.InscriptionBRC20TickInfo, nil, data)
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryTransfer = append(tokenInfo.HistoryTransfer, history)
if g.EnableHistory {
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_TRANSFER, !isInvalid, true, transferInfo, nil, data)
history := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryTransfer = append(tokenInfo.HistoryTransfer, history)
if !isInvalid {
// all history
g.AllHistory = append(g.AllHistory, history)
}
}
// from
// get user's tokens to update
fromUserTokens, ok := g.UserTokensBalanceData[senderPkScript]
if !ok {
log.Printf("(%d%%) ProcessBRC20Transfer send from user missing. height: %d, txidx: %d",
progress,
log.Printf("ProcessBRC20Transfer send from user missing. height: %d, txidx: %d",
data.Height,
data.TxIdx,
)
return
return errors.New("transfer, invalid from data")
}
// get tokenBalance to update
fromTokenBalance, ok := fromUserTokens[uniqueLowerTicker]
if !ok {
log.Printf("(%d%%) ProcessBRC20Transfer send from ticker missing. height: %d, txidx: %d",
progress,
log.Printf("ProcessBRC20Transfer send from ticker missing. height: %d, txidx: %d",
data.Height,
data.TxIdx,
)
return
return errors.New("transfer, invalid from balance")
}
if isInvalid {
fromHistory := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_SEND, false, true, &transferInfo.InscriptionBRC20TickInfo, fromTokenBalance, data)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
fromTokenBalance.HistorySend = append(fromTokenBalance.HistorySend, fromHistory)
return
if g.EnableHistory {
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_SEND, false, true, transferInfo, fromTokenBalance, data)
fromHistory := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
fromTokenBalance.HistorySend = append(fromTokenBalance.HistorySend, fromHistory)
userHistory := g.GetBRC20HistoryByUser(senderPkScript)
userHistory.History = append(userHistory.History, fromHistory)
}
return nil
}
if _, ok := fromTokenBalance.ValidTransferMap[data.CreateIdxKey]; !ok {
log.Printf("(%d%%) ProcessBRC20Transfer send from transfer missing(dup transfer?). height: %d, txidx: %d",
progress,
log.Printf("ProcessBRC20Transfer send from transfer missing(dup transfer?). height: %d, txidx: %d",
data.Height,
data.TxIdx,
)
return
return errors.New("transfer, invalid transfer")
}
// to
@@ -105,62 +119,132 @@ func (g *BRC20Indexer) ProcessTransfer(progress int, data *model.InscriptionBRC2
// get tokenBalance to update
var tokenBalance *model.BRC20TokenBalance
if token, ok := userTokens[uniqueLowerTicker]; !ok {
tokenBalance = &model.BRC20TokenBalance{Ticker: transferInfo.BRC20Tick, PkScript: receiverPkScript}
tokenBalance = &model.BRC20TokenBalance{Ticker: transferInfo.Tick, PkScript: receiverPkScript}
userTokens[uniqueLowerTicker] = tokenBalance
// set token's users
tokenUsers := g.TokenUsersBalanceData[uniqueLowerTicker]
tokenUsers[receiverPkScript] = tokenBalance
} else {
tokenBalance = token
}
// set token's users
tokenUsers := g.TokenUsersBalanceData[uniqueLowerTicker]
tokenUsers[receiverPkScript] = tokenBalance
// set from
fromTokenBalance.OverallBalanceSafe = fromTokenBalance.OverallBalanceSafe.Sub(transferInfo.Amount)
fromTokenBalance.OverallBalance = fromTokenBalance.OverallBalance.Sub(transferInfo.Amount)
fromTokenBalance.TransferableBalance = fromTokenBalance.TransferableBalance.Sub(transferInfo.Amount)
delete(fromTokenBalance.ValidTransferMap, data.CreateIdxKey)
fromHistory := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_SEND, true, true, &transferInfo.InscriptionBRC20TickInfo, fromTokenBalance, data)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
fromTokenBalance.HistorySend = append(fromTokenBalance.HistorySend, fromHistory)
if g.EnableHistory {
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_SEND, true, true, transferInfo, fromTokenBalance, data)
fromHistory := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
fromTokenBalance.HistorySend = append(fromTokenBalance.HistorySend, fromHistory)
userHistoryFrom := g.GetBRC20HistoryByUser(senderPkScript)
userHistoryFrom.History = append(userHistoryFrom.History, fromHistory)
}
// set to
if data.BlockTime > 0 {
tokenBalance.OverallBalanceSafe = tokenBalance.OverallBalanceSafe.Add(transferInfo.Amount)
tokenBalance.AvailableBalanceSafe = tokenBalance.AvailableBalanceSafe.Add(transferInfo.Amount)
}
tokenBalance.OverallBalance = tokenBalance.OverallBalance.Add(transferInfo.Amount)
tokenBalance.AvailableBalance = tokenBalance.AvailableBalance.Add(transferInfo.Amount)
toHistory := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_RECEIVE, true, true, &transferInfo.InscriptionBRC20TickInfo, tokenBalance, data)
tokenBalance.History = append(tokenBalance.History, toHistory)
tokenBalance.HistoryReceive = append(tokenBalance.HistoryReceive, toHistory)
// burn
if len(receiverPkScript) == 1 && []byte(receiverPkScript)[0] == 0x6a {
tokenInfo.Deploy.Burned = tokenInfo.Deploy.Burned.Add(transferInfo.Amount)
}
if g.EnableHistory {
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_RECEIVE, true, true, transferInfo, tokenBalance, data)
toHistory := g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
tokenBalance.History = append(tokenBalance.History, toHistory)
tokenBalance.HistoryReceive = append(tokenBalance.HistoryReceive, toHistory)
userHistoryTo := g.GetBRC20HistoryByUser(receiverPkScript)
userHistoryTo.History = append(userHistoryTo.History, toHistory)
}
////////////////////////////////////////////////////////////////
// skip module deposit if self mint for now
if tokenInfo.Deploy.SelfMint {
return nil
}
////////////////////////////////////////////////////////////////
// module conditional approve (black withdraw)
if g.ThisTxId != data.TxId {
g.TxStaticTransferStatesForConditionalApprove = nil
g.ThisTxId = data.TxId
}
inscriptionId := transferInfo.Meta.GetInscriptionId()
events := g.GenerateApproveEventsByTransfer(inscriptionId, transferInfo.Tick, senderPkScript, receiverPkScript, transferInfo.Amount)
if err := g.ProcessConditionalApproveEvents(events); err != nil {
return err
}
////////////////////////////////////////////////////////////////
// module deposit
moduleId, ok := utils.GetModuleFromScript([]byte(receiverPkScript))
if !ok {
// errors.New("module transfer, not module")
return nil
}
moduleInfo, ok := g.ModulesInfoMap[moduleId]
if !ok { // invalid module
return nil
// return errors.New(fmt.Sprintf("module transfer, module(%s) not exist", moduleId))
}
// global history
mHistory := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_TYPE_N_TRANSFER, transferInfo.Meta, data, nil, true)
moduleInfo.History = append(moduleInfo.History, mHistory)
// get user's tokens to update
moduleTokenBalance := moduleInfo.GetUserTokenBalance(transferInfo.Tick, senderPkScript)
// set module deposit
if data.BlockTime > 0 { // how many confirmations are ok
moduleTokenBalance.SwapAccountBalanceSafe = moduleTokenBalance.SwapAccountBalanceSafe.Add(transferInfo.Amount)
}
moduleTokenBalance.SwapAccountBalance = moduleTokenBalance.SwapAccountBalance.Add(transferInfo.Amount)
// record state
stateBalance := moduleInfo.GetTickConditionalApproveStateBalance(transferInfo.Tick)
stateBalance.BalanceDeposite = stateBalance.BalanceDeposite.Add(transferInfo.Amount)
return nil
}
func (g *BRC20Indexer) ProcessInscribeTransfer(progress int, data *model.InscriptionBRC20Data, body *model.InscriptionBRC20Content) {
func (g *BRC20ModuleIndexer) ProcessInscribeTransfer(data *model.InscriptionBRC20Data) error {
body := new(model.InscriptionBRC20MintTransferContent)
if err := body.Unmarshal(data.ContentBody); err != nil {
return nil
}
// check tick
uniqueLowerTicker := strings.ToLower(body.BRC20Tick)
uniqueLowerTicker, err := utils.GetValidUniqueLowerTickerTicker(body.BRC20Tick)
if err != nil {
return nil
// return errors.New("transfer, tick length not 4 or 5")
}
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
return
return nil
// return errors.New(fmt.Sprintf("transfer %s, but tick not exist", body.BRC20Tick))
}
tinfo := tokenInfo.Deploy
// check amount
amt, precision, err := decimal.NewDecimalFromString(body.BRC20Amount)
amt, err := decimal.NewDecimalFromString(body.BRC20Amount, int(tinfo.Decimal))
if err != nil {
log.Printf("(%d%%) ProcessInscribeTransfer, but amount invalid. ticker: %s, amount: '%s'",
progress,
tokenInfo.Ticker,
body.BRC20Amount,
)
return
}
if precision > int(tinfo.Decimal) {
return
return nil
// return errors.New("transfer, but invalid amount")
}
if amt.Sign() <= 0 || amt.Cmp(tinfo.Max) > 0 {
return
return nil
// return errors.New("transfer, invalid amount(range)")
}
balanceTransfer := decimal.NewDecimalCopy(amt)
@@ -178,51 +262,76 @@ func (g *BRC20Indexer) ProcessInscribeTransfer(progress int, data *model.Inscrip
if token, ok := userTokens[uniqueLowerTicker]; !ok {
tokenBalance = &model.BRC20TokenBalance{Ticker: tokenInfo.Ticker, PkScript: data.PkScript}
userTokens[uniqueLowerTicker] = tokenBalance
// set token's users
tokenUsers := g.TokenUsersBalanceData[uniqueLowerTicker]
tokenUsers[string(data.PkScript)] = tokenBalance
} else {
tokenBalance = token
}
// set token's users
tokenUsers := g.TokenUsersBalanceData[uniqueLowerTicker]
tokenUsers[string(data.PkScript)] = tokenBalance
body.BRC20Tick = tokenInfo.Ticker
transferInfo := model.NewInscriptionBRC20TickTransferInfo(body, data)
transferInfo.Decimal = tinfo.Decimal
transferInfo := model.NewInscriptionBRC20TickInfo(body.BRC20Tick, body.Operation, data)
transferInfo.Data.BRC20Amount = body.BRC20Amount
transferInfo.Data.BRC20Limit = tinfo.Data.BRC20Limit
transferInfo.Data.BRC20Decimal = tinfo.Data.BRC20Decimal
transferInfo.Tick = tokenInfo.Ticker
transferInfo.Amount = balanceTransfer
transferInfo.Meta = data
history := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_TRANSFER, true, false, &transferInfo.InscriptionBRC20TickInfo, tokenBalance, data)
if tokenBalance.OverallBalance.Sub(tokenBalance.TransferableBalance).Cmp(balanceTransfer) < 0 { // invalid
history.Valid = false
// user history
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryInscribeTransfer = append(tokenBalance.HistoryInscribeTransfer, history)
// global history
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryInscribeTransfer = append(tokenInfo.HistoryInscribeTransfer, history)
if g.EnableHistory {
history := g.HistoryCount
historyObj := model.NewBRC20History(constant.BRC20_HISTORY_TYPE_N_INSCRIBE_TRANSFER, true, false, transferInfo, tokenBalance, data)
// If we used the safe version of the available balance, the unconfirmed balance could not be used to create a valid transfer inscription.
if tokenBalance.AvailableBalance.Cmp(balanceTransfer) < 0 {
historyObj.Valid = false
// user history
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryInscribeTransfer = append(tokenBalance.HistoryInscribeTransfer, history)
// global history
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryInscribeTransfer = append(tokenInfo.HistoryInscribeTransfer, history)
tokenBalance.InvalidTransferList = append(tokenBalance.InvalidTransferList, transferInfo)
userHistory := g.GetBRC20HistoryByUser(string(data.PkScript))
userHistory.History = append(userHistory.History, history)
} else {
historyObj.Valid = true
// user tick history
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryInscribeTransfer = append(tokenBalance.HistoryInscribeTransfer, history)
// user history
userHistory := g.GetBRC20HistoryByUser(string(data.PkScript))
userHistory.History = append(userHistory.History, history)
// global history
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryInscribeTransfer = append(tokenInfo.HistoryInscribeTransfer, history)
// all history
g.AllHistory = append(g.AllHistory, history)
}
g.UpdateHistoryHeightAndGetHistoryIndex(historyObj)
}
// If we used the safe version of the available balance, the unconfirmed balance could not be used to create a valid transfer inscription.
if tokenBalance.AvailableBalance.Cmp(balanceTransfer) < 0 {
g.InscriptionsInvalidTransferMap[data.CreateIdxKey] = transferInfo
} else {
tokenBalance.TransferableBalance = tokenBalance.TransferableBalance.Add(balanceTransfer)
history.TransferableBalance = tokenBalance.TransferableBalance.String() // update balance
history.AvailableBalance = tokenBalance.OverallBalance.Sub(tokenBalance.TransferableBalance).String() // update balance
// Update available balance
history.Valid = true
// user history
tokenBalance.History = append(tokenBalance.History, history)
tokenBalance.HistoryInscribeTransfer = append(tokenBalance.HistoryInscribeTransfer, history)
// global history
tokenInfo.History = append(tokenInfo.History, history)
tokenInfo.HistoryInscribeTransfer = append(tokenInfo.HistoryInscribeTransfer, history)
// fixme: The available safe balance may not decrease, the current transfer usage of available balance source is not accurately distinguished.
tokenBalance.AvailableBalanceSafe = tokenBalance.AvailableBalanceSafe.Sub(balanceTransfer)
tokenBalance.AvailableBalance = tokenBalance.AvailableBalance.Sub(balanceTransfer)
tokenBalance.TransferableBalance = tokenBalance.TransferableBalance.Add(balanceTransfer)
if tokenBalance.ValidTransferMap == nil {
tokenBalance.ValidTransferMap = make(map[string]*model.InscriptionBRC20TickTransferInfo, 1)
tokenBalance.ValidTransferMap = make(map[string]*model.InscriptionBRC20TickInfo, 1)
}
tokenBalance.ValidTransferMap[data.CreateIdxKey] = transferInfo
g.InscriptionsValidTransferMap[data.CreateIdxKey] = transferInfo
g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = &transferInfo.InscriptionBRC20TickInfo
g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = transferInfo.Data
}
return nil
}

indexer/indexer.go (new file)

@@ -0,0 +1,196 @@
package indexer
import (
"bytes"
"log"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/model"
)
func isJson(contentBody []byte) bool {
if len(contentBody) < 40 {
return false
}
content := bytes.TrimSpace(contentBody)
if !bytes.HasPrefix(content, []byte("{")) {
return false
}
if !bytes.HasSuffix(content, []byte("}")) {
return false
}
return true
}
// ProcessUpdateLatestBRC20Loop
func (g *BRC20ModuleIndexer) ProcessUpdateLatestBRC20Loop(brc20Datas, brc20DatasDump chan interface{}) {
if brc20Datas == nil {
return
}
for dataIn := range brc20Datas {
for {
data := dataIn.(*model.InscriptionBRC20Data)
// update latest height
g.BestHeight = data.Height
// is sending transfer
if data.IsTransfer {
// module conditional approve
if condApproveInfo, isInvalid := g.GetConditionalApproveInfoByKey(data.CreateIdxKey); condApproveInfo != nil {
if err := g.ProcessConditionalApprove(data, condApproveInfo, isInvalid); err != nil {
log.Printf("process conditional approve move failed: %s", err)
}
break
}
// not first move
if data.Sequence != 1 {
break
}
// transfer
if transferInfo, isInvalid := g.GetTransferInfoByKey(data.CreateIdxKey); transferInfo != nil {
if err := g.ProcessTransfer(data, transferInfo, isInvalid); err != nil {
log.Printf("process transfer move failed: %s", err)
}
break
}
// module approve
if approveInfo, isInvalid := g.GetApproveInfoByKey(data.CreateIdxKey); approveInfo != nil {
if err := g.ProcessApprove(data, approveInfo, isInvalid); err != nil {
log.Printf("process approve move failed: %s", err)
}
break
}
// module commit
if commitFrom, isInvalid := g.GetCommitInfoByKey(data.CreateIdxKey); commitFrom != nil {
if err := g.ProcessCommit(commitFrom, data, isInvalid); err != nil {
log.Printf("process commit move failed: %s", err)
}
break
}
break
}
// inscribe as fee
if data.Satoshi == 0 {
break
}
if ok := isJson(data.ContentBody); !ok {
// log.Println("not json")
break
}
// protocol, lower case only
body := new(model.InscriptionBRC20ProtocalContent)
if err := body.Unmarshal(data.ContentBody); err != nil {
// log.Println("Unmarshal failed", err, string(data.ContentBody))
break
}
// is inscribe deploy/mint/transfer
if body.Proto != constant.BRC20_P &&
body.Proto != constant.BRC20_P_MODULE &&
body.Proto != constant.BRC20_P_SWAP {
// log.Println("not proto")
break
}
var process func(*model.InscriptionBRC20Data) error
if body.Proto == constant.BRC20_P && body.Operation == constant.BRC20_OP_DEPLOY {
process = g.ProcessDeploy
} else if body.Proto == constant.BRC20_P && body.Operation == constant.BRC20_OP_MINT {
process = g.ProcessMint
} else if body.Proto == constant.BRC20_P && body.Operation == constant.BRC20_OP_TRANSFER {
process = g.ProcessInscribeTransfer
} else if body.Proto == constant.BRC20_P_MODULE && body.Operation == constant.BRC20_OP_MODULE_DEPLOY {
process = g.ProcessCreateModule
} else if body.Proto == constant.BRC20_P_MODULE && body.Operation == constant.BRC20_OP_MODULE_WITHDRAW {
process = g.ProcessInscribeWithdraw
} else if body.Proto == constant.BRC20_P_SWAP && body.Operation == constant.BRC20_OP_SWAP_APPROVE {
process = g.ProcessInscribeApprove
} else if body.Proto == constant.BRC20_P_SWAP && body.Operation == constant.BRC20_OP_SWAP_CONDITIONAL_APPROVE {
process = g.ProcessInscribeConditionalApprove
} else if body.Proto == constant.BRC20_P_SWAP && body.Operation == constant.BRC20_OP_SWAP_COMMIT {
process = g.ProcessInscribeCommit
} else {
break
}
if err := process(data); err != nil {
if body.Operation == constant.BRC20_OP_MINT {
if conf.DEBUG {
log.Printf("(%d) process failed: %s", g.BestHeight, err)
}
} else {
log.Printf("(%d) process failed: %s", g.BestHeight, err)
}
}
break
}
if brc20DatasDump != nil {
brc20DatasDump <- dataIn
}
}
for _, holdersBalanceMap := range g.TokenUsersBalanceData {
for key, balance := range holdersBalanceMap {
if balance.AvailableBalance.Sign() == 0 && balance.TransferableBalance.Sign() == 0 {
delete(holdersBalanceMap, key)
}
}
}
log.Printf("process swap finish. ticker: %d, users: %d, tokens: %d, validInscription: %d, validTransfer: %d, invalidTransfer: %d, history: %d",
len(g.InscriptionsTickerInfoMap),
len(g.UserTokensBalanceData),
len(g.TokenUsersBalanceData),
len(g.InscriptionsValidBRC20DataMap),
len(g.InscriptionsValidTransferMap),
len(g.InscriptionsInvalidTransferMap),
g.HistoryCount,
)
nswap := 0
for _, m := range g.ModulesInfoMap {
nswap += len(m.SwapPoolTotalBalanceDataMap)
}
nuser := 0
for _, m := range g.ModulesInfoMap {
nuser += len(m.UsersTokenBalanceDataMap)
}
log.Printf("process swap finish. module: %d, swap: %d, users: %d, validApprove: %d, invalidApprove: %d, validCommit: %d, invalidCommit: %d",
len(g.ModulesInfoMap),
nswap,
nuser,
len(g.InscriptionsValidApproveMap),
len(g.InscriptionsInvalidApproveMap),
len(g.InscriptionsValidCommitMap),
len(g.InscriptionsInvalidCommitMap),
)
}
// Init initializes the base BRC20 state and the swap module state.
func (g *BRC20ModuleIndexer) Init() {
g.initBRC20()
g.initModule()
}

View File

@@ -1,26 +1,125 @@
package indexer
import (
"bytes"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
)
type BRC20Indexer struct {
type BRC20ModuleIndexer struct {
BestHeight uint32
EnableHistory bool
HistoryCount uint32
HistoryData [][]byte
// history height
FirstHistoryByHeight map[uint32]uint32
LastHistoryHeight uint32
// brc20 base
AllHistory []uint32 // all valid history
UserAllHistory map[string]*model.BRC20UserHistory
InscriptionsTickerInfoMap map[string]*model.BRC20TokenInfo
UserTokensBalanceData map[string]map[string]*model.BRC20TokenBalance
TokenUsersBalanceData map[string]map[string]*model.BRC20TokenBalance
InscriptionsValidBRC20DataMap map[string]*model.InscriptionBRC20TickInfo
InscriptionsValidBRC20DataMap map[string]*model.InscriptionBRC20InfoResp
// inner valid transfer
InscriptionsValidTransferMap map[string]*model.InscriptionBRC20TickTransferInfo
InscriptionsValidTransferMap map[string]*model.InscriptionBRC20TickInfo
// inner invalid transfer
InscriptionsInvalidTransferMap map[string]*model.InscriptionBRC20TickTransferInfo
InscriptionsInvalidTransferMap map[string]*model.InscriptionBRC20TickInfo
// module
// all modules info
ModulesInfoMap map[string]*model.BRC20ModuleSwapInfo
// module of users [address]moduleid
UsersModuleWithTokenMap map[string]string
// module lp of users [address]moduleid
UsersModuleWithLpTokenMap map[string]string
// runtime for approve
InscriptionsValidApproveMap map[string]*model.InscriptionBRC20SwapInfo // inner valid approve
InscriptionsInvalidApproveMap map[string]*model.InscriptionBRC20SwapInfo
// runtime for conditional approve
InscriptionsValidConditionalApproveMap map[string]*model.InscriptionBRC20SwapConditionalApproveInfo
InscriptionsInvalidConditionalApproveMap map[string]*model.InscriptionBRC20SwapConditionalApproveInfo
// runtime for commit
InscriptionsValidCommitMap map[string]*model.InscriptionBRC20Data // inner valid commit by key
InscriptionsInvalidCommitMap map[string]*model.InscriptionBRC20Data
InscriptionsValidCommitMapById map[string]*model.InscriptionBRC20Data // inner valid commit by id
// for gen approve event
ThisTxId string
TxStaticTransferStatesForConditionalApprove []*model.TransferStateForConditionalApprove
}
func (g *BRC20Indexer) initBRC20() {
func (g *BRC20ModuleIndexer) GetBRC20HistoryByUser(pkScript string) (userHistory *model.BRC20UserHistory) {
if history, ok := g.UserAllHistory[pkScript]; !ok {
userHistory = &model.BRC20UserHistory{}
g.UserAllHistory[pkScript] = userHistory
} else {
userHistory = history
}
return userHistory
}
func (g *BRC20ModuleIndexer) GetBRC20HistoryByUserForAPI(pkScript string) (userHistory *model.BRC20UserHistory) {
if history, ok := g.UserAllHistory[pkScript]; !ok {
userHistory = &model.BRC20UserHistory{}
} else {
userHistory = history
}
return userHistory
}
func (g *BRC20ModuleIndexer) UpdateHistoryHeightAndGetHistoryIndex(historyObj *model.BRC20History) uint32 {
height := historyObj.Height
history := g.HistoryCount
g.HistoryData = append(g.HistoryData, historyObj.Marshal())
g.HistoryCount += 1
if height == g.LastHistoryHeight || height == constant.MEMPOOL_HEIGHT {
return history
}
if g.LastHistoryHeight == 0 {
g.FirstHistoryByHeight[height] = history
} else {
for h := g.LastHistoryHeight + 1; h <= height; h++ {
g.FirstHistoryByHeight[h] = history
}
}
g.LastHistoryHeight = height
return history
}
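// Illustrative sketch (assumed semantics, not the library's API): after the
// filling loop above, firstByHeight[h] holds the index of the first history
// record whose height is >= h, so a height-range query reduces to two map
// lookups. The helper name historyRangeSketch and the fallback rules are
// assumptions for illustration only.
package main

import "fmt"

// historyRangeSketch returns the half-open index range [start, end) of history
// records for block heights h1..h2 (inclusive).
func historyRangeSketch(firstByHeight map[uint32]uint32, total uint32, h1, h2 uint32) (start, end uint32) {
	var ok bool
	if start, ok = firstByHeight[h1]; !ok {
		start = total // no history at or after h1 recorded yet
	}
	if end, ok = firstByHeight[h2+1]; !ok {
		end = total // h2 is at (or beyond) the last indexed height
	}
	return start, end
}

func main() {
	// history heights: indices 0,1 at height 100; 2,3 at 102; 4,5 at 103
	firstByHeight := map[uint32]uint32{100: 0, 101: 2, 102: 2, 103: 4}
	start, end := historyRangeSketch(firstByHeight, 6, 101, 102)
	fmt.Println(start, end) // 2 4 -> history indices 2 and 3
}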
func (g *BRC20ModuleIndexer) initBRC20() {
g.EnableHistory = true
g.BestHeight = 0
g.HistoryCount = 0
g.HistoryData = make([][]byte, 0)
g.FirstHistoryByHeight = make(map[uint32]uint32, 0)
g.LastHistoryHeight = 0
// all history
g.AllHistory = make([]uint32, 0)
// user history
g.UserAllHistory = make(map[string]*model.BRC20UserHistory, 0)
// all ticker info
g.InscriptionsTickerInfoMap = make(map[string]*model.BRC20TokenInfo, 0)
@@ -31,94 +130,324 @@ func (g *BRC20Indexer) initBRC20() {
g.TokenUsersBalanceData = make(map[string]map[string]*model.BRC20TokenBalance, 0)
// valid brc20 inscriptions
g.InscriptionsValidBRC20DataMap = make(map[string]*model.InscriptionBRC20TickInfo, 0)
g.InscriptionsValidBRC20DataMap = make(map[string]*model.InscriptionBRC20InfoResp, 0)
// inner valid transfer
g.InscriptionsValidTransferMap = make(map[string]*model.InscriptionBRC20TickTransferInfo, 0)
g.InscriptionsValidTransferMap = make(map[string]*model.InscriptionBRC20TickInfo, 0)
// inner invalid transfer
g.InscriptionsInvalidTransferMap = make(map[string]*model.InscriptionBRC20TickTransferInfo, 0)
g.InscriptionsInvalidTransferMap = make(map[string]*model.InscriptionBRC20TickInfo, 0)
}
func isJson(contentBody []byte) bool {
if len(contentBody) < 40 {
return false
}
func (g *BRC20ModuleIndexer) initModule() {
// all modules info
g.ModulesInfoMap = make(map[string]*model.BRC20ModuleSwapInfo, 0)
content := bytes.TrimSpace(contentBody)
if !bytes.HasPrefix(content, []byte("{")) {
return false
}
if !bytes.HasSuffix(content, []byte("}")) {
return false
}
// module of users [address]moduleid
g.UsersModuleWithTokenMap = make(map[string]string, 0)
return true
// swap
// module of users [address]moduleid
g.UsersModuleWithLpTokenMap = make(map[string]string, 0)
// runtime for approve
g.InscriptionsValidApproveMap = make(map[string]*model.InscriptionBRC20SwapInfo, 0)
g.InscriptionsInvalidApproveMap = make(map[string]*model.InscriptionBRC20SwapInfo, 0)
// runtime for conditional approve
g.InscriptionsValidConditionalApproveMap = make(map[string]*model.InscriptionBRC20SwapConditionalApproveInfo, 0)
g.InscriptionsInvalidConditionalApproveMap = make(map[string]*model.InscriptionBRC20SwapConditionalApproveInfo, 0)
// runtime for commit
g.InscriptionsValidCommitMap = make(map[string]*model.InscriptionBRC20Data, 0) // inner valid commit
g.InscriptionsInvalidCommitMap = make(map[string]*model.InscriptionBRC20Data, 0)
g.InscriptionsValidCommitMapById = make(map[string]*model.InscriptionBRC20Data, 0) // inner valid commit
}
// ProcessUpdateLatestBRC20
func (g *BRC20Indexer) ProcessUpdateLatestBRC20(brc20Datas []*model.InscriptionBRC20Data) {
totalDataCount := len(brc20Datas)
func (g *BRC20ModuleIndexer) GenerateApproveEventsByTransfer(inscription, tick, from, to string, amt *decimal.Decimal) (events []*model.ConditionalApproveEvent) {
transStateStatic := &model.TransferStateForConditionalApprove{
Tick: tick,
From: from,
To: to,
Balance: decimal.NewDecimalCopy(amt), // maybe no need copy
InscriptionId: inscription,
Max: amt.String(),
}
// First, globally save the transfer status.
g.TxStaticTransferStatesForConditionalApprove = append(g.TxStaticTransferStatesForConditionalApprove, transStateStatic)
// Then process each module one by one.
for _, moduleInfo := range g.ModulesInfoMap {
if g.ThisTxId != moduleInfo.ThisTxId {
// The first time a transfer event is processed within this module for the current tx, clear the per-tx state first.
moduleInfo.TransferStatesForConditionalApprove = nil
moduleInfo.ApproveStatesForConditionalApprove = nil
moduleInfo.ThisTxId = g.ThisTxId
}
g.initBRC20()
log.Printf("ProcessUpdateLatestBRC20 update. total %d", len(brc20Datas))
// Skip processing the transfer directly when there is no approve status.
if len(moduleInfo.ApproveStatesForConditionalApprove) == 0 {
continue
}
for idx, data := range brc20Datas {
progress := idx * 100 / totalDataCount
transState := &model.TransferStateForConditionalApprove{
Tick: tick,
From: from,
To: to,
Balance: decimal.NewDecimalCopy(amt), // maybe no need copy
InscriptionId: inscription,
Max: amt.String(),
}
// is sending transfer
if data.IsTransfer {
// transfer
if transferInfo, isInvalid := g.GetTransferInfoByKey(data.CreateIdxKey); transferInfo != nil {
g.ProcessTransfer(idx, data, transferInfo, isInvalid)
innerEvents := moduleInfo.GenerateApproveEventsByTransfer(transState)
events = append(events, innerEvents...)
}
return events
}
func (g *BRC20ModuleIndexer) GenerateApproveEventsByApprove(owner string, balance *decimal.Decimal,
data *model.InscriptionBRC20Data, approveInfo *model.InscriptionBRC20SwapConditionalApproveInfo) (events []*model.ConditionalApproveEvent) {
if moduleInfo, ok := g.ModulesInfoMap[approveInfo.Module]; ok {
log.Printf("generate approve event. module: %s", moduleInfo.ID)
if g.ThisTxId != moduleInfo.ThisTxId {
// First appearance, clear status
moduleInfo.TransferStatesForConditionalApprove = nil
moduleInfo.ApproveStatesForConditionalApprove = nil
moduleInfo.ThisTxId = g.ThisTxId
log.Printf("generate approve event. init")
}
// First appearance of approve, copy all global transfer events.
if len(moduleInfo.ApproveStatesForConditionalApprove) == 0 {
moduleInfo.TransferStatesForConditionalApprove = nil
for _, s := range g.TxStaticTransferStatesForConditionalApprove {
moduleInfo.TransferStatesForConditionalApprove = append(moduleInfo.TransferStatesForConditionalApprove, s)
}
log.Printf("generate approve event. copy transfer")
}
log.Printf("generate approve event. balance: %s", balance.String())
innerEvents := moduleInfo.GenerateApproveEventsByApprove(owner, balance, data, approveInfo)
events = append(events, innerEvents...)
}
return events
}
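// Sketch of the per-transaction lazy reset pattern used above (assumed
// semantics): module-local conditional-approve state is only valid for the
// transaction currently being processed, so it is cleared the first time the
// module is touched under a new txid. The type and method names below are
// hypothetical.
package main

import "fmt"

type moduleTxStateSketch struct {
	thisTxId       string
	transferStates []string
	approveStates  []string
}

// touch resets the per-tx state when a new transaction id is seen.
func (m *moduleTxStateSketch) touch(txid string) {
	if m.thisTxId != txid {
		m.transferStates = nil
		m.approveStates = nil
		m.thisTxId = txid
	}
}

func main() {
	m := &moduleTxStateSketch{}
	m.touch("tx1")
	m.transferStates = append(m.transferStates, "transfer-a")
	m.touch("tx1") // same tx: state kept
	fmt.Println(len(m.transferStates)) // 1
	m.touch("tx2") // new tx: state cleared
	fmt.Println(len(m.transferStates)) // 0
}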
func (copyDup *BRC20ModuleIndexer) deepCopyBRC20Data(base *BRC20ModuleIndexer) {
// history
copyDup.BestHeight = base.BestHeight
copyDup.EnableHistory = base.EnableHistory
copyDup.HistoryCount = base.HistoryCount
for height, history := range base.FirstHistoryByHeight {
copyDup.FirstHistoryByHeight[height] = history
}
copyDup.LastHistoryHeight = base.LastHistoryHeight
for _, h := range base.HistoryData {
copyDup.HistoryData = append(copyDup.HistoryData, h)
}
copyDup.AllHistory = make([]uint32, len(base.AllHistory))
copy(copyDup.AllHistory, base.AllHistory)
// userhistory
for u, userHistory := range base.UserAllHistory {
h := &model.BRC20UserHistory{
History: make([]uint32, len(userHistory.History)),
}
copy(h.History, userHistory.History)
copyDup.UserAllHistory[u] = h
}
for k, v := range base.InscriptionsTickerInfoMap {
tinfo := &model.BRC20TokenInfo{
Ticker: v.Ticker,
Deploy: v.Deploy.DeepCopy(),
}
// history
tinfo.History = make([]uint32, len(v.History))
copy(tinfo.History, v.History)
tinfo.HistoryMint = make([]uint32, len(v.HistoryMint))
copy(tinfo.HistoryMint, v.HistoryMint)
tinfo.HistoryInscribeTransfer = make([]uint32, len(v.HistoryInscribeTransfer))
copy(tinfo.HistoryInscribeTransfer, v.HistoryInscribeTransfer)
tinfo.HistoryTransfer = make([]uint32, len(v.HistoryTransfer))
copy(tinfo.HistoryTransfer, v.HistoryTransfer)
// set info
copyDup.InscriptionsTickerInfoMap[k] = tinfo
}
for u, userTokens := range base.UserTokensBalanceData {
userTokensCopy := make(map[string]*model.BRC20TokenBalance, 0)
copyDup.UserTokensBalanceData[u] = userTokensCopy
for uniqueLowerTicker, v := range userTokens {
tb := v.DeepCopy()
userTokensCopy[uniqueLowerTicker] = tb
tokenUsers, ok := copyDup.TokenUsersBalanceData[uniqueLowerTicker]
if !ok {
tokenUsers = make(map[string]*model.BRC20TokenBalance, 0)
copyDup.TokenUsersBalanceData[uniqueLowerTicker] = tokenUsers
}
tokenUsers[u] = tb
}
}
for k, v := range base.InscriptionsValidBRC20DataMap {
copyDup.InscriptionsValidBRC20DataMap[k] = v
}
// transferInfo
for k, v := range base.InscriptionsValidTransferMap {
copyDup.InscriptionsValidTransferMap[k] = v
}
// fixme: disable invalid copy
for k, v := range base.InscriptionsInvalidTransferMap {
copyDup.InscriptionsInvalidTransferMap[k] = v
}
log.Printf("deepCopyBRC20Data finish. total: %d", len(base.InscriptionsTickerInfoMap))
}
func (copyDup *BRC20ModuleIndexer) cherryPickBRC20Data(base *BRC20ModuleIndexer, pickUsersPkScript, pickTokensTick map[string]bool) {
for lowerTick := range pickTokensTick {
v, ok := base.InscriptionsTickerInfoMap[lowerTick]
if !ok {
continue
}
tinfo := &model.BRC20TokenInfo{
Ticker: v.Ticker,
Deploy: v.Deploy.DeepCopy(),
}
copyDup.InscriptionsTickerInfoMap[lowerTick] = tinfo
}
for u := range pickUsersPkScript {
userTokens, ok := base.UserTokensBalanceData[u]
if !ok {
continue
}
userTokensCopy := make(map[string]*model.BRC20TokenBalance, 0)
for lowerTick := range pickTokensTick {
balance, ok := userTokens[lowerTick]
if !ok {
continue
}
continue
}
if ok := isJson(data.ContentBody); !ok {
continue
}
body := new(model.InscriptionBRC20Content)
if err := body.Unmarshal(data.ContentBody); err != nil {
continue
}
data.ContentBody = nil
// is inscribe deploy/mint/transfer
if body.Proto != constant.BRC20_P || len(body.BRC20Tick) != 4 {
continue
}
if body.Proto == constant.BRC20_P && body.Operation == constant.BRC20_OP_DEPLOY { // op deploy
g.ProcessDeploy(progress, data, body)
} else if body.Proto == constant.BRC20_P && body.Operation == constant.BRC20_OP_MINT { // op mint
g.ProcessMint(progress, data, body)
} else if body.Proto == constant.BRC20_P && body.Operation == constant.BRC20_OP_TRANSFER { // op transfer
g.ProcessInscribeTransfer(progress, data, body)
} else {
continue
userTokensCopy[lowerTick] = balance.DeepCopy()
}
copyDup.UserTokensBalanceData[u] = userTokensCopy
}
for _, holdersBalanceMap := range g.TokenUsersBalanceData {
for key, balance := range holdersBalanceMap {
if balance.OverallBalance.Sign() <= 0 {
delete(holdersBalanceMap, key)
for u, userTokens := range copyDup.UserTokensBalanceData {
for uniqueLowerTicker, balance := range userTokens {
tokenUsers, ok := copyDup.TokenUsersBalanceData[uniqueLowerTicker]
if !ok {
tokenUsers = make(map[string]*model.BRC20TokenBalance, 0)
copyDup.TokenUsersBalanceData[uniqueLowerTicker] = tokenUsers
}
tokenUsers[u] = balance
}
}
log.Printf("ProcessUpdateLatestBRC20 finish. ticker: %d, users: %d, tokens: %d, validInscription: %d, validTransfer: %d, invalidTransfer: %d",
len(g.InscriptionsTickerInfoMap),
len(g.UserTokensBalanceData),
len(g.TokenUsersBalanceData),
len(g.InscriptionsValidBRC20DataMap),
len(g.InscriptionsValidTransferMap),
len(g.InscriptionsInvalidTransferMap),
)
log.Printf("cherryPickBRC20Data finish. total: %d", len(copyDup.InscriptionsTickerInfoMap))
}
func (copyDup *BRC20ModuleIndexer) deepCopyModuleData(base *BRC20ModuleIndexer) {
for module, info := range base.ModulesInfoMap {
copyDup.ModulesInfoMap[module] = info.DeepCopy()
}
// module of users
for k, v := range base.UsersModuleWithTokenMap {
copyDup.UsersModuleWithTokenMap[k] = v
}
// module lp of users
for k, v := range base.UsersModuleWithLpTokenMap {
copyDup.UsersModuleWithLpTokenMap[k] = v
}
// approveInfo
for k, v := range base.InscriptionsValidApproveMap {
copyDup.InscriptionsValidApproveMap[k] = v
}
for k, v := range base.InscriptionsInvalidApproveMap {
copyDup.InscriptionsInvalidApproveMap[k] = v
}
// conditional approveInfo
for k, v := range base.InscriptionsValidConditionalApproveMap {
copyDup.InscriptionsValidConditionalApproveMap[k] = v.DeepCopy()
}
for k, v := range base.InscriptionsInvalidConditionalApproveMap {
copyDup.InscriptionsInvalidConditionalApproveMap[k] = v.DeepCopy()
}
// commitInfo
for k, v := range base.InscriptionsValidCommitMap {
copyDup.InscriptionsValidCommitMap[k] = v
}
for k, v := range base.InscriptionsInvalidCommitMap {
copyDup.InscriptionsInvalidCommitMap[k] = v
}
for k, v := range base.InscriptionsValidCommitMapById {
copyDup.InscriptionsValidCommitMapById[k] = v
}
// runtime state
copyDup.ThisTxId = base.ThisTxId
for _, v := range base.TxStaticTransferStatesForConditionalApprove {
copyDup.TxStaticTransferStatesForConditionalApprove = append(copyDup.TxStaticTransferStatesForConditionalApprove, v.DeepCopy())
}
log.Printf("deepCopyModuleData finish. total: %d", len(base.ModulesInfoMap))
}
func (copyDup *BRC20ModuleIndexer) cherryPickModuleData(base *BRC20ModuleIndexer, module string, pickUsersPkScript, pickTokensTick, pickPoolsPair map[string]bool) {
info, ok := base.ModulesInfoMap[module]
if ok {
copyDup.ModulesInfoMap[module] = info.CherryPick(pickUsersPkScript, pickTokensTick, pickPoolsPair)
}
// Data required for verification
for k, v := range base.InscriptionsValidCommitMapById {
copyDup.InscriptionsValidCommitMapById[k] = v
}
log.Printf("cherryPickModuleData finish. total: %d", len(base.ModulesInfoMap))
}
func (base *BRC20ModuleIndexer) DeepCopy() (copyDup *BRC20ModuleIndexer) {
log.Printf("DeepCopy enter")
copyDup = &BRC20ModuleIndexer{}
copyDup.Init()
copyDup.deepCopyBRC20Data(base)
copyDup.deepCopyModuleData(base)
return copyDup
}
func (base *BRC20ModuleIndexer) CherryPick(module string, pickUsersPkScript, pickTokensTick, pickPoolsPair map[string]bool) (copyDup *BRC20ModuleIndexer) {
log.Printf("CherryPick enter")
copyDup = &BRC20ModuleIndexer{}
copyDup.Init()
moduleInfo, ok := base.ModulesInfoMap[module]
if ok {
lowerTick := strings.ToLower(moduleInfo.GasTick)
pickTokensTick[lowerTick] = true
}
copyDup.cherryPickBRC20Data(base, pickUsersPkScript, pickTokensTick)
copyDup.cherryPickModuleData(base, module, pickUsersPkScript, pickTokensTick, pickPoolsPair)
return copyDup
}

204
indexer/module_approve.go Normal file
View File

@@ -0,0 +1,204 @@
package indexer
import (
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) GetApproveInfoByKey(createIdxKey string) (
approveInfo *model.InscriptionBRC20SwapInfo, isInvalid bool) {
var ok bool
// approve
approveInfo, ok = g.InscriptionsValidApproveMap[createIdxKey]
if !ok {
approveInfo, ok = g.InscriptionsInvalidApproveMap[createIdxKey]
if !ok {
approveInfo = nil
}
isInvalid = true
}
return approveInfo, isInvalid
}
func (g *BRC20ModuleIndexer) ProcessApprove(data *model.InscriptionBRC20Data, approveInfo *model.InscriptionBRC20SwapInfo, isInvalid bool) error {
// ticker
uniqueLowerTicker := strings.ToLower(approveInfo.Tick)
if _, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]; !ok {
return errors.New("approve, invalid ticker")
}
moduleInfo, ok := g.ModulesInfoMap[approveInfo.Module]
if !ok {
log.Printf("ProcessBRC20Approve send approve, but ticker invalid. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return errors.New("approve, module invalid")
}
// from
// get user's tokens to update
fromUserTokens, ok := moduleInfo.UsersTokenBalanceDataMap[string(approveInfo.Data.PkScript)]
if !ok {
log.Printf("ProcessBRC20Approve send from user missing. height: %d, txidx: %d",
data.Height,
data.TxIdx,
)
return errors.New("approve, send from user missing")
}
// get tokenBalance to update
fromTokenBalance, ok := fromUserTokens[uniqueLowerTicker]
if !ok {
log.Printf("ProcessBRC20Approve send from ticker missing. height: %d, txidx: %d",
data.Height,
data.TxIdx,
)
return errors.New("approve, send from ticker missing")
}
// Cross-check whether the approve-inscription exists.
if _, ok := fromTokenBalance.ValidApproveMap[data.CreateIdxKey]; !ok {
log.Printf("ProcessBRC20Approve send from approve missing(dup approve?). height: %d, txidx: %d",
data.Height,
data.TxIdx,
)
return errors.New("approve, send from approve missing(dup)")
}
// to address
receiverPkScript := string(data.PkScript)
if data.Satoshi == 0 {
receiverPkScript = string(approveInfo.Data.PkScript)
data.PkScript = receiverPkScript
}
// global history
historyData := &model.BRC20SwapHistoryApproveData{
Tick: approveInfo.Tick,
Amount: approveInfo.Amount.String(),
}
history := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_APPROVE, approveInfo.Data, data, historyData, !isInvalid)
moduleInfo.History = append(moduleInfo.History, history)
if isInvalid {
// from invalid history
fromHistory := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_APPROVE_FROM, approveInfo.Data, data, nil, false)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
return nil
}
// to
tokenBalance := moduleInfo.GetUserTokenBalance(approveInfo.Tick, receiverPkScript)
// set from
fromTokenBalance.ApproveableBalance = fromTokenBalance.ApproveableBalance.Sub(approveInfo.Amount)
delete(fromTokenBalance.ValidApproveMap, data.CreateIdxKey)
fromHistory := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_APPROVE_FROM, approveInfo.Data, data, nil, true)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
// set to
if data.BlockTime > 0 {
tokenBalance.SwapAccountBalanceSafe = tokenBalance.SwapAccountBalanceSafe.Add(approveInfo.Amount)
}
tokenBalance.SwapAccountBalance = tokenBalance.SwapAccountBalance.Add(approveInfo.Amount)
toHistory := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_APPROVE_TO, approveInfo.Data, data, nil, true)
tokenBalance.History = append(tokenBalance.History, toHistory)
return nil
}
func (g *BRC20ModuleIndexer) ProcessInscribeApprove(data *model.InscriptionBRC20Data) error {
var body model.InscriptionBRC20ModuleSwapApproveContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
log.Printf("parse approve json failed. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return err
}
// lower case moduleid only
if body.Module != strings.ToLower(body.Module) {
return errors.New("module id invalid")
}
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok { // invalid module
return errors.New("module invalid")
}
if len(body.Tick) != 4 {
return errors.New("tick invalid")
}
uniqueLowerTicker := strings.ToLower(body.Tick)
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
return errors.New("tick not exist")
}
tinfo := tokenInfo.Deploy
amt, err := decimal.NewDecimalFromString(body.Amount, int(tinfo.Decimal))
if err != nil {
return errors.New(fmt.Sprintf("approve amount invalid: %s", body.Amount))
}
if amt.Sign() <= 0 || amt.Cmp(tinfo.Max) > 0 {
return errors.New("amount out of range")
}
balanceApprove := decimal.NewDecimalCopy(amt)
// Unify ticker case
body.Tick = tokenInfo.Ticker
// Set up approve data for subsequent use.
approveInfo := &model.InscriptionBRC20SwapInfo{
Data: data,
}
approveInfo.Module = body.Module
approveInfo.Tick = tokenInfo.Ticker
approveInfo.Amount = balanceApprove
// global history
historyData := &model.BRC20SwapHistoryApproveData{
Tick: approveInfo.Tick,
Amount: approveInfo.Amount.String(),
}
history := model.NewBRC20ModuleHistory(false, constant.BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_APPROVE, data, data, historyData, true)
moduleInfo.History = append(moduleInfo.History, history)
// Check if the module balance is sufficient to approve
moduleTokenBalance := moduleInfo.GetUserTokenBalance(approveInfo.Tick, data.PkScript)
// requires available >= amt
if moduleTokenBalance.AvailableBalance.Cmp(balanceApprove) < 0 { // invalid
history.Valid = false
g.InscriptionsInvalidApproveMap[data.CreateIdxKey] = approveInfo
} else {
history.Valid = true
// Deduct directly from the available balance and move the amount into ApproveableBalance.
moduleTokenBalance.AvailableBalanceSafe = moduleTokenBalance.AvailableBalanceSafe.Sub(balanceApprove)
moduleTokenBalance.AvailableBalance = moduleTokenBalance.AvailableBalance.Sub(balanceApprove)
moduleTokenBalance.ApproveableBalance = moduleTokenBalance.ApproveableBalance.Add(balanceApprove)
// Update personal approve lookup table ValidApproveMap
if moduleTokenBalance.ValidApproveMap == nil {
moduleTokenBalance.ValidApproveMap = make(map[string]*model.InscriptionBRC20Data, 1)
}
moduleTokenBalance.ValidApproveMap[data.CreateIdxKey] = data
// Update global approve lookup table
g.InscriptionsValidApproveMap[data.CreateIdxKey] = approveInfo
// g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = approveInfo.Data // fixme
}
return nil
}
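// A compact sketch (not the library's API) of the module balance buckets moved
// by the two steps above: inscribing an approve locks the amount from the
// module-available bucket into the approveable bucket, and the later transfer
// of that inscription credits the receiver's swap account balance. All type
// and function names here are hypothetical.
package main

import "fmt"

type swapBalanceSketch struct {
	Available   int64 // module balance still available to approve
	Approveable int64 // locked by pending approve inscriptions
	SwapAccount int64 // spendable inside the swap module
}

func inscribeApproveSketch(from *swapBalanceSketch, amt int64) error {
	if from.Available < amt {
		return fmt.Errorf("approve amount exceeds available balance")
	}
	from.Available -= amt
	from.Approveable += amt
	return nil
}

func sendApproveSketch(from, to *swapBalanceSketch, amt int64) {
	from.Approveable -= amt
	to.SwapAccount += amt
}

func main() {
	alice := &swapBalanceSketch{Available: 100}
	bob := &swapBalanceSketch{}
	_ = inscribeApproveSketch(alice, 40)
	sendApproveSketch(alice, bob, 40)
	fmt.Printf("alice=%+v bob=%+v\n", *alice, *bob)
}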

140
indexer/module_commit.go Normal file
View File

@@ -0,0 +1,140 @@
package indexer
import (
"encoding/hex"
"encoding/json"
"errors"
"log"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) GetCommitInfoByKey(createIdxKey string) (
commitData *model.InscriptionBRC20Data, isInvalid bool) {
var ok bool
// commit
commitData, ok = g.InscriptionsValidCommitMap[createIdxKey]
if !ok {
commitData, ok = g.InscriptionsInvalidCommitMap[createIdxKey]
if !ok {
commitData = nil
}
isInvalid = true
}
return commitData, isInvalid
}
func (g *BRC20ModuleIndexer) ProcessCommit(dataFrom, dataTo *model.InscriptionBRC20Data, isInvalid bool) error {
inscriptionId := dataFrom.GetInscriptionId()
log.Printf("parse move commit. inscription id: %s", inscriptionId)
// Delete the already sent commit
delete(g.InscriptionsValidCommitMapById, inscriptionId)
var body *model.InscriptionBRC20ModuleSwapCommitContent
if err := json.Unmarshal(dataFrom.ContentBody, &body); err != nil {
log.Printf("parse module commit json failed. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(dataTo.TxId))),
)
return errors.New("json")
}
// Check the inscription receiving address; it must be the module address.
moduleId, ok := utils.GetModuleFromScript([]byte(dataTo.PkScript))
if !ok || moduleId != body.Module {
return errors.New("commit, not send to module")
}
// check module exist
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok {
return errors.New("commit, module not exist")
}
// preset invalid
moduleInfo.CommitInvalidMap[inscriptionId] = struct{}{}
// Check the inscription sending address; it must be the sequencer address.
if moduleInfo.SequencerPkScript != dataFrom.PkScript {
return errors.New("module sequencer invalid")
}
eachFuntionSize, err := GetEachItemLengthOfCommitJsonData(dataFrom.ContentBody)
if err != nil || len(body.Data) != len(eachFuntionSize) {
return errors.New("commit, get function size failed")
}
log.Printf("ProcessCommitVerify commit[%s] ", inscriptionId)
var pickUsersPkScript = make(map[string]bool, 0)
var pickTokensTick = make(map[string]bool, 0)
var pickPoolsPair = make(map[string]bool, 0)
g.InitCherryPickFilter(body, pickUsersPkScript, pickTokensTick, pickPoolsPair)
swapState := g.CherryPick(body.Module, pickUsersPkScript, pickTokensTick, pickPoolsPair)
// Need to cherrypick, then verify on the copy.
if idx, _, err := swapState.ProcessCommitVerify(inscriptionId, body, eachFuntionSize, nil); err != nil {
log.Printf("commit invalid, function[%d] %s, txid: %s", idx, err, hex.EncodeToString([]byte(dataTo.TxId)))
return err
}
// If the dry run succeeds, apply the commit to the real state.
if idx, _, err := g.ProcessCommitVerify(inscriptionId, body, eachFuntionSize, nil); err != nil {
log.Printf("commit invalid, function[%d] %s, txid: %s", idx, err, hex.EncodeToString([]byte(dataTo.TxId)))
return err
}
// set commit id
moduleInfo.CommitIdMap[inscriptionId] = struct{}{}
moduleInfo.CommitIdChainMap[body.Parent] = struct{}{}
// valid
delete(moduleInfo.CommitInvalidMap, inscriptionId)
history := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_COMMIT, dataFrom, dataTo, nil, true)
moduleInfo.History = append(moduleInfo.History, history)
return nil
}
func GetCommitParentFromData(data *model.InscriptionBRC20Data) (string, error) {
var body *model.InscriptionBRC20ModuleSwapCommitContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
return "", errors.New("json")
}
return body.Parent, nil
}
func (g *BRC20ModuleIndexer) ProcessCommitCheck(data *model.InscriptionBRC20Data) (int, error) {
var body *model.InscriptionBRC20ModuleSwapCommitContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
return -1, errors.New("json")
}
// check module exist
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok {
return -1, errors.New("commit, module not exist")
}
eachFuntionSize, err := GetEachItemLengthOfCommitJsonData(data.ContentBody)
if err != nil || len(body.Data) != len(eachFuntionSize) {
return -1, errors.New("commit, get function size failed")
}
inscriptionId := data.GetInscriptionId()
log.Printf("ProcessCommitVerify commit[%s] ", inscriptionId)
idx, _, err := g.ProcessCommitVerify(inscriptionId, body, eachFuntionSize, nil)
if err != nil {
return idx, err
}
// set commit id
moduleInfo.CommitIdMap[inscriptionId] = struct{}{}
moduleInfo.CommitIdChainMap[body.Parent] = struct{}{}
// Delete the already sent commit
delete(g.InscriptionsValidCommitMapById, inscriptionId)
return 0, nil
}

View File

@@ -0,0 +1,177 @@
package indexer
import (
"errors"
"log"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) ProcessCommitFunctionAddLiquidity(moduleInfo *model.BRC20ModuleSwapInfo, f *model.SwapFunctionData) error {
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return errors.New("func: addLiq poolPair invalid")
}
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pool, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPair]
if !ok {
return errors.New("addLiq: pool invalid")
}
usersLpBalanceInPool, ok := moduleInfo.LPTokenUsersBalanceMap[poolPair]
if !ok {
return errors.New("addLiq: users invalid")
}
// log.Printf("[%s] pool before addliq [%s] %s: %s, %s: %s, lp: %s", moduleInfo.ID, poolPair, pool.Tick[0], pool.TickBalance[0], pool.Tick[1], pool.TickBalance[1], pool.LpBalance)
log.Printf("pool addliq params: %v", f.Params)
token0AmtStr := f.Params[1]
token1AmtStr := f.Params[2]
tokenLpAmtStr := f.Params[3]
token0Amt, _ := g.CheckTickVerify(token0, token0AmtStr)
token1Amt, _ := g.CheckTickVerify(token1, token1AmtStr)
tokenLpAmt, _ := decimal.NewDecimalFromString(tokenLpAmtStr, 18)
// LP Balance Slippage Check
slippageAmtStr := f.Params[4]
slippageAmt, _ := decimal.NewDecimalFromString(slippageAmtStr, 3)
var token0Idx, token1Idx int
if token0 == pool.Tick[0] {
token0Idx = 0
token1Idx = 1
} else {
token0Idx = 1
token1Idx = 0
}
var first bool = false
var lpForPool, lpForUser *decimal.Decimal
if pool.TickBalance[0].Sign() == 0 && pool.TickBalance[1].Sign() == 0 {
first = true
lpForPool = token0Amt.Mul(token1Amt).Sqrt()
if lpForPool.Cmp(decimal.NewDecimal(1000, 18)) < 0 {
return errors.New("addLiq: lp less than 1000")
}
lpForUser = lpForPool.Sub(decimal.NewDecimal(1000, 18))
} else {
// Issuing additional LP, as a way of collecting service fees.
feeRateSwapAmt, ok := CheckAmountVerify(moduleInfo.FeeRateSwap, 3)
if !ok {
log.Printf("pool addliq FeeRateSwap invalid: %s", moduleInfo.FeeRateSwap)
return errors.New("addLiq: feerate swap invalid")
}
if feeRateSwapAmt.Sign() > 0 {
// lp = (poolLp * (rootK - rootKLast)) / (rootK * 5 + rootKLast)
rootK := pool.TickBalance[token0Idx].Mul(pool.TickBalance[token1Idx]).Sqrt()
lpFee := pool.LpBalance.Mul(rootK.Sub(pool.LastRootK)).Div(
rootK.Mul(decimal.NewDecimal(5, 0)).Add(pool.LastRootK))
log.Printf("pool addliq issue lp: %s", lpFee.String())
if lpFee.Sign() > 0 {
// pool lp update
pool.LpBalance = pool.LpBalance.Add(lpFee)
// lpFee lp balance update
lpFeelpbalance := usersLpBalanceInPool[moduleInfo.LpFeePkScript]
lpFeelpbalance = lpFeelpbalance.Add(lpFee)
usersLpBalanceInPool[moduleInfo.LpFeePkScript] = lpFeelpbalance
// lpFee-lp-balance
lpFeelpsBalance, ok := moduleInfo.UsersLPTokenBalanceMap[moduleInfo.LpFeePkScript]
if !ok {
lpFeelpsBalance = make(map[string]*decimal.Decimal, 0)
moduleInfo.UsersLPTokenBalanceMap[moduleInfo.LpFeePkScript] = lpFeelpsBalance
}
lpFeelpsBalance[poolPair] = lpFeelpbalance
}
}
// Calculate the amount of liquidity tokens acquired
token1AdjustAmt := pool.TickBalance[token1Idx].Mul(token0Amt).Div(pool.TickBalance[token0Idx])
if token1Amt.Cmp(token1AdjustAmt) >= 0 {
token1Amt = token1AdjustAmt
} else {
token0AdjustAmt := pool.TickBalance[token0Idx].Mul(token1Amt).Div(pool.TickBalance[token1Idx])
token0Amt = token0AdjustAmt
}
lp0 := pool.LpBalance.Mul(token0Amt).Div(pool.TickBalance[token0Idx])
lp1 := pool.LpBalance.Mul(token1Amt).Div(pool.TickBalance[token1Idx])
if lp0.Cmp(lp1) > 0 {
lpForPool = lp1
} else {
lpForPool = lp0
}
lpForUser = lpForPool
}
if lpForUser.Cmp(tokenLpAmt.Mul(decimal.NewDecimal(1000, 3).Sub(slippageAmt)).Div(decimal.NewDecimal(1000, 3))) < 0 {
log.Printf("user[%s], lp: %s < expect: %s. * %s", f.Address, lpForUser, tokenLpAmt, tokenLpAmt.Sub(tokenLpAmt.Mul(slippageAmt)))
return errors.New("addLiq: over slippage")
}
// User Balance Check
token0Balance := moduleInfo.GetUserTokenBalance(token0, f.PkScript)
token1Balance := moduleInfo.GetUserTokenBalance(token1, f.PkScript)
// fixme: Must use the confirmed amount
if token0Balance.SwapAccountBalance.Cmp(token0Amt) < 0 {
log.Printf("token0[%s] user[%s], balance %s", token0, f.Address, token0Balance)
return errors.New("addLiq: token0 balance insufficient")
}
// fixme: Must use the confirmed amount
if token1Balance.SwapAccountBalance.Cmp(token1Amt) < 0 {
log.Printf("token1[%s] user[%s], balance %s", token1, f.Address, token1Balance)
return errors.New("addLiq: token1 balance insufficient")
}
// User Real-time Balance Update
token0Balance.SwapAccountBalance = token0Balance.SwapAccountBalance.Sub(token0Amt)
token1Balance.SwapAccountBalance = token1Balance.SwapAccountBalance.Sub(token1Amt)
// fixme: User safety balance update
// lp balance update
// lp-user-balance
lpbalance := usersLpBalanceInPool[f.PkScript]
lpbalance = lpbalance.Add(lpForUser)
usersLpBalanceInPool[f.PkScript] = lpbalance
// user-lp-balance
lpsBalance, ok := moduleInfo.UsersLPTokenBalanceMap[f.PkScript]
if !ok {
lpsBalance = make(map[string]*decimal.Decimal, 0)
moduleInfo.UsersLPTokenBalanceMap[f.PkScript] = lpsBalance
}
lpsBalance[poolPair] = lpbalance
// zero address lp balance update
if first {
zerolpbalance := usersLpBalanceInPool[constant.ZERO_ADDRESS_PKSCRIPT]
zerolpbalance = zerolpbalance.Add(decimal.NewDecimal(1000, 18))
usersLpBalanceInPool[constant.ZERO_ADDRESS_PKSCRIPT] = zerolpbalance
// zerouser-lp-balance
zerolpsBalance, ok := moduleInfo.UsersLPTokenBalanceMap[constant.ZERO_ADDRESS_PKSCRIPT]
if !ok {
zerolpsBalance = make(map[string]*decimal.Decimal, 0)
moduleInfo.UsersLPTokenBalanceMap[constant.ZERO_ADDRESS_PKSCRIPT] = zerolpsBalance
}
zerolpsBalance[poolPair] = zerolpbalance
}
// Changes in pool balance
pool.TickBalance[token0Idx] = pool.TickBalance[token0Idx].Add(token0Amt)
pool.TickBalance[token1Idx] = pool.TickBalance[token1Idx].Add(token1Amt)
pool.LpBalance = pool.LpBalance.Add(lpForPool)
// update lastRootK
pool.LastRootK = pool.TickBalance[token0Idx].Mul(pool.TickBalance[token1Idx]).Sqrt()
// log.Printf("[%s] pool after addliq [%s] %s: %s, %s: %s, lp: %s", moduleInfo.ID, poolPair, pool.Tick[0], pool.TickBalance[0], pool.Tick[1], pool.TickBalance[1], pool.LpBalance)
return nil
}
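// A hedged, integer-only sketch of the LP accounting above, using math/big in
// place of the library's decimal type (so amounts here are raw integer units,
// not decimal strings). It shows the first-mint rule lp = sqrt(a0*a1) with the
// 1000-unit minimum locked to the zero address, the follow-up rule
// lp = min(L*a0/r0, L*a1/r1), and the protocol-fee mint
// lpFee = L*(rootK - rootKLast)/(rootK*5 + rootKLast). Function names are
// hypothetical.
package main

import (
	"fmt"
	"math/big"
)

var minLiquidity = big.NewInt(1000)

// firstMint returns (lpForPool, lpForUser) for an empty pool.
func firstMint(a0, a1 *big.Int) (pool, user *big.Int, err error) {
	pool = new(big.Int).Sqrt(new(big.Int).Mul(a0, a1))
	if pool.Cmp(minLiquidity) < 0 {
		return nil, nil, fmt.Errorf("lp less than %s", minLiquidity)
	}
	user = new(big.Int).Sub(pool, minLiquidity)
	return pool, user, nil
}

// followUpMint returns the LP minted against existing reserves r0, r1 and
// total LP supply L for a deposit of (a0, a1).
func followUpMint(L, r0, r1, a0, a1 *big.Int) *big.Int {
	lp0 := new(big.Int).Div(new(big.Int).Mul(L, a0), r0)
	lp1 := new(big.Int).Div(new(big.Int).Mul(L, a1), r1)
	if lp0.Cmp(lp1) > 0 {
		return lp1
	}
	return lp0
}

// feeMint reproduces lpFee = L*(rootK - rootKLast)/(rootK*5 + rootKLast).
func feeMint(L, r0, r1, rootKLast *big.Int) *big.Int {
	rootK := new(big.Int).Sqrt(new(big.Int).Mul(r0, r1))
	num := new(big.Int).Mul(L, new(big.Int).Sub(rootK, rootKLast))
	den := new(big.Int).Add(new(big.Int).Mul(rootK, big.NewInt(5)), rootKLast)
	return new(big.Int).Div(num, den)
}

func main() {
	pool, user, _ := firstMint(big.NewInt(1_000_000), big.NewInt(4_000_000))
	fmt.Println(pool, user) // 2000000 1999000
	fmt.Println(followUpMint(pool, big.NewInt(1_000_000), big.NewInt(4_000_000),
		big.NewInt(100_000), big.NewInt(400_000))) // 200000
	fmt.Println(feeMint(pool, big.NewInt(1_100_000), big.NewInt(4_400_000), big.NewInt(2_000_000))) // 30769
}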

View File

@@ -0,0 +1,30 @@
package indexer
import (
"errors"
"log"
"github.com/unisat-wallet/libbrc20-indexer/model"
)
func (g *BRC20ModuleIndexer) ProcessCommitFunctionDecreaseApproval(moduleInfo *model.BRC20ModuleSwapInfo, f *model.SwapFunctionData) error {
token := f.Params[0]
tokenAmtStr := f.Params[1]
tokenAmt, _ := g.CheckTickVerify(token, tokenAmtStr)
tokenBalance := moduleInfo.GetUserTokenBalance(token, f.PkScript)
// fixme: Must use the confirmed amount
if tokenBalance.SwapAccountBalance.Cmp(tokenAmt) < 0 {
log.Printf("token[%s] user[%s], balance %s", token, f.Address, tokenBalance)
return errors.New("decreaseApproval: token balance insufficient")
}
// User Real-time Balance Update
tokenBalance.SwapAccountBalance = tokenBalance.SwapAccountBalance.Sub(tokenAmt)
tokenBalance.AvailableBalance = tokenBalance.AvailableBalance.Add(tokenAmt)
log.Printf("pool decreaseApproval [%s] available: %s, swappable: %s", token, tokenBalance.AvailableBalance, tokenBalance.SwapAccountBalance)
return nil
}

View File

@@ -0,0 +1,41 @@
package indexer
import (
"errors"
"log"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
)
func (g *BRC20ModuleIndexer) ProcessCommitFunctionDeployPool(moduleInfo *model.BRC20ModuleSwapInfo, f *model.SwapFunctionData) error {
token0, token1 := f.Params[0], f.Params[1]
poolPair := GetLowerPairNameByToken(token0, token1)
if _, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPair]; ok {
return errors.New("deploy: twice")
}
poolPairReverse := GetLowerPairNameByToken(token1, token0)
if _, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPairReverse]; ok {
return errors.New("deploy: twice")
}
poolPair = GetLowerInnerPairNameByToken(token0, token1)
// lp token balance of address in module [pool][address]balance
moduleInfo.LPTokenUsersBalanceMap[poolPair] = make(map[string]*decimal.Decimal, 0)
token0Amt, _ := g.CheckTickVerify(token0, "0")
token1Amt, _ := g.CheckTickVerify(token1, "0")
// swap total balance
// total balance of pool in module [pool]balanceData
moduleInfo.SwapPoolTotalBalanceDataMap[poolPair] = &model.BRC20ModulePoolTotalBalance{
Tick: [2]string{token0, token1},
History: make([]*model.BRC20ModuleHistory, 0), // fixme:
// balance
TickBalance: [2]*decimal.Decimal{token0Amt, token1Amt},
}
log.Printf("[%s] pool deploy pool [%s]", moduleInfo.ID, poolPair)
return nil
}

View File

@@ -0,0 +1,36 @@
package indexer
import (
"encoding/hex"
"errors"
"log"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) ProcessCommitFunctionGasFee(moduleInfo *model.BRC20ModuleSwapInfo, userPkScript string, gasAmt *decimal.Decimal) error {
tokenBalance := moduleInfo.GetUserTokenBalance(moduleInfo.GasTick, userPkScript)
// fixme: Must use the confirmed amount
if tokenBalance.SwapAccountBalance.Cmp(gasAmt) < 0 {
address, err := utils.GetAddressFromScript([]byte(userPkScript), conf.GlobalNetParams)
if err != nil {
address = hex.EncodeToString([]byte(userPkScript))
}
log.Printf("gas[%s] user[%s], balance %s", moduleInfo.GasTick, address, tokenBalance)
return errors.New("gas fee: token balance insufficient")
}
gasToBalance := moduleInfo.GetUserTokenBalance(moduleInfo.GasTick, moduleInfo.GasToPkScript)
// User Real-time gas Balance Update
tokenBalance.SwapAccountBalance = tokenBalance.SwapAccountBalance.Sub(gasAmt)
gasToBalance.SwapAccountBalance = gasToBalance.SwapAccountBalance.Add(gasAmt)
// log.Printf("gas fee[%s]: %s user: %s, gasTo: %s", moduleInfo.GasTick, gasAmt, tokenBalance.SwapAccountBalance, gasToBalance.SwapAccountBalance)
return nil
}

View File

@@ -0,0 +1,58 @@
package indexer
import (
"encoding/hex"
"encoding/json"
"errors"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
// ProcessInscribeCommit handles a newly inscribed commit, which has not taken effect yet.
func (g *BRC20ModuleIndexer) ProcessInscribeCommit(data *model.InscriptionBRC20Data) (err error) {
inscriptionId := data.GetInscriptionId()
log.Printf("parse new inscribe commit. inscription id: %s", inscriptionId)
var body *model.InscriptionBRC20ModuleSwapCommitContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
log.Printf("parse commit json failed. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return errors.New("json invalid")
}
// lower case module id only
if body.Module != strings.ToLower(body.Module) {
return errors.New("module id invalid")
}
// check module exist
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok {
return errors.New("module invalid")
}
// preset invalid
moduleInfo.CommitInvalidMap[inscriptionId] = struct{}{}
// check sequencer match
if moduleInfo.SequencerPkScript != data.PkScript {
return errors.New("module sequencer invalid")
}
idx, err := g.ProcessInscribeCommitPreVerify(body)
if err != nil {
log.Printf("commit invalid inscribe. function[%d], %s, txid: %s", idx, err, hex.EncodeToString([]byte(data.TxId)))
return err
}
g.InscriptionsValidCommitMap[data.CreateIdxKey] = data
g.InscriptionsValidCommitMapById[inscriptionId] = data
// valid
delete(moduleInfo.CommitInvalidMap, inscriptionId)
return nil
}

View File

@@ -0,0 +1,142 @@
package indexer
import (
"errors"
"fmt"
"log"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) ProcessCommitFunctionRemoveLiquidity(moduleInfo *model.BRC20ModuleSwapInfo, f *model.SwapFunctionData) error {
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return errors.New("func: removeLiq poolPair invalid")
}
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pool, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPair]
if !ok {
return errors.New("removeLiq: pool invalid")
}
usersLpBalanceInPool, ok := moduleInfo.LPTokenUsersBalanceMap[poolPair]
if !ok {
return errors.New("removeLiq: lps balance map missing pair")
}
lpsBalance, ok := moduleInfo.UsersLPTokenBalanceMap[f.PkScript]
if !ok {
return errors.New("removeLiq: users balance map missing user")
}
// log.Printf("[%s] pool before removeliq [%s] %s: %s, %s: %s, lp: %s", moduleInfo.ID, poolPair, pool.Tick[0], pool.TickBalance[0], pool.Tick[1], pool.TickBalance[1], pool.LpBalance)
log.Printf("pool removeliq params: %v", f.Params)
tokenLpAmtStr := f.Params[1]
token0AmtStr := f.Params[2]
token1AmtStr := f.Params[3]
token0Amt, _ := g.CheckTickVerify(token0, token0AmtStr)
token1Amt, _ := g.CheckTickVerify(token1, token1AmtStr)
tokenLpAmt, _ := decimal.NewDecimalFromString(tokenLpAmtStr, 18)
// LP Balance Slippage Check
slippageAmtStr := f.Params[4]
slippageAmt, _ := decimal.NewDecimalFromString(slippageAmtStr, 3)
var token0Idx, token1Idx int
if token0 == pool.Tick[0] {
token0Idx = 0
token1Idx = 1
} else {
token0Idx = 1
token1Idx = 0
}
// Increase LP, as a method of collecting service fees.
feeRateSwapAmt, _ := CheckAmountVerify(moduleInfo.FeeRateSwap, 3)
if feeRateSwapAmt.Sign() > 0 {
// lp = (poolLp * (rootK - rootKLast)) / (rootK * 5 + rootKLast)
rootK := pool.TickBalance[token0Idx].Mul(pool.TickBalance[token1Idx]).Sqrt()
lpFee := pool.LpBalance.Mul(rootK.Sub(pool.LastRootK)).Div(
rootK.Mul(decimal.NewDecimal(5, 0)).Add(pool.LastRootK))
if lpFee.Sign() > 0 {
// pool lp update
pool.LpBalance = pool.LpBalance.Add(lpFee)
// lpFee update
lpFeelpbalance := usersLpBalanceInPool[moduleInfo.LpFeePkScript]
lpFeelpbalance = lpFeelpbalance.Add(lpFee)
usersLpBalanceInPool[moduleInfo.LpFeePkScript] = lpFeelpbalance
// lpFee-lp-balance
lpFeelpsBalance, ok := moduleInfo.UsersLPTokenBalanceMap[moduleInfo.LpFeePkScript]
if !ok {
lpFeelpsBalance = make(map[string]*decimal.Decimal, 0)
moduleInfo.UsersLPTokenBalanceMap[moduleInfo.LpFeePkScript] = lpFeelpsBalance
}
lpFeelpsBalance[poolPair] = lpFeelpbalance
}
}
// Slippage Check
amt0 := pool.TickBalance[token0Idx].Mul(tokenLpAmt).Div(pool.LpBalance)
if amt0.Cmp(token0Amt.Sub(token0Amt.Mul(slippageAmt))) < 0 {
log.Printf("user[%s], token0: %s, expect: %s", f.Address, amt0, token0Amt)
return errors.New("removeLiq: over slippage")
}
amt1 := pool.TickBalance[token1Idx].Mul(tokenLpAmt).Div(pool.LpBalance)
if amt1.Cmp(token1Amt.Sub(token1Amt.Mul(slippageAmt))) < 0 {
log.Printf("user[%s], token1: %s, expect: %s", f.Address, amt1, token1Amt)
return errors.New("removeLiq: over slippage")
}
// Changes in pool balance
if pool.LpBalance.Cmp(tokenLpAmt) < 0 {
return errors.New(fmt.Sprintf("removeLiq: tokenLp balance insufficient, %s < %s", pool.LpBalance, tokenLpAmt))
}
if pool.TickBalance[token0Idx].Cmp(amt0) < 0 {
return errors.New(fmt.Sprintf("removeLiq: pool %s balance insufficient", pool.Tick[token1Idx]))
}
if pool.TickBalance[token1Idx].Cmp(amt1) < 0 {
return errors.New(fmt.Sprintf("removeLiq: pool %s balance insufficient", pool.Tick[token1Idx]))
}
// Check whether the user's LP balance is consistent (consider storing only one copy)
userbalance := usersLpBalanceInPool[f.PkScript]
lpBalance := lpsBalance[poolPair]
if userbalance.Cmp(lpBalance) != 0 {
return errors.New("removeLiq: user's tokenLp balance miss match")
}
// Check whether the balance of user LP is sufficient.
if userbalance.Cmp(tokenLpAmt) < 0 {
return errors.New(fmt.Sprintf("removeLiq: user's tokenLp balance insufficient, %s < %s", userbalance, tokenLpAmt))
}
if lpBalance.Cmp(tokenLpAmt) < 0 {
return errors.New(fmt.Sprintf("removeLiq: user's tokenLp balance insufficient, %s < %s", lpBalance, tokenLpAmt))
}
// update lp balance
usersLpBalanceInPool[f.PkScript] = userbalance.Sub(tokenLpAmt)
lpsBalance[poolPair] = lpBalance.Sub(tokenLpAmt)
token0Balance := moduleInfo.GetUserTokenBalance(token0, f.PkScript)
token1Balance := moduleInfo.GetUserTokenBalance(token1, f.PkScript)
// Credit the withdrawn amounts to the user's token balances
token0Balance.SwapAccountBalance = token0Balance.SwapAccountBalance.Add(amt0)
token1Balance.SwapAccountBalance = token1Balance.SwapAccountBalance.Add(amt1)
pool.LpBalance = pool.LpBalance.Sub(tokenLpAmt) // fixme
// Deduct token balance in the pool
pool.TickBalance[token0Idx] = pool.TickBalance[token0Idx].Sub(amt0)
pool.TickBalance[token1Idx] = pool.TickBalance[token1Idx].Sub(amt1)
// update lastRootK
pool.LastRootK = pool.TickBalance[token0Idx].Mul(pool.TickBalance[token1Idx]).Sqrt()
// log.Printf("[%s] pool after removeliq [%s] %s: %s, %s: %s, lp: %s", moduleInfo.ID, poolPair, pool.Tick[0], pool.TickBalance[0], pool.Tick[1], pool.TickBalance[1], pool.LpBalance)
return nil
}
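// Integer-only sketch (names hypothetical) of the proportional withdrawal and
// slippage check above: burning lp out of a total supply L redeems
// amt_i = r_i * lp / L of each reserve, and the result must not fall below the
// quoted amount reduced by the slippage tolerance (expressed here in basis
// points instead of the library's 3-decimal fraction).
package main

import (
	"fmt"
	"math/big"
)

// redeem returns the reserve amounts paid out for burning lp LP tokens.
func redeem(r0, r1, L, lp *big.Int) (amt0, amt1 *big.Int) {
	amt0 = new(big.Int).Div(new(big.Int).Mul(r0, lp), L)
	amt1 = new(big.Int).Div(new(big.Int).Mul(r1, lp), L)
	return amt0, amt1
}

// withinSlippage reports whether got >= quoted * (10000 - slippageBps) / 10000.
func withinSlippage(got, quoted *big.Int, slippageBps int64) bool {
	min := new(big.Int).Mul(quoted, big.NewInt(10000-slippageBps))
	min.Div(min, big.NewInt(10000))
	return got.Cmp(min) >= 0
}

func main() {
	r0, r1 := big.NewInt(1_000_000), big.NewInt(4_000_000)
	L, lp := big.NewInt(2_000_000), big.NewInt(500_000)
	amt0, amt1 := redeem(r0, r1, L, lp)
	fmt.Println(amt0, amt1)                                    // 250000 1000000
	fmt.Println(withinSlippage(amt0, big.NewInt(251_000), 50)) // true: 250000 >= 249745
}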

View File

@@ -0,0 +1,89 @@
package indexer
import (
"errors"
"fmt"
"log"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) ProcessCommitFunctionSend(moduleInfo *model.BRC20ModuleSwapInfo, f *model.SwapFunctionData) error {
addressTo := f.Params[0]
pkScriptTo, _ := utils.GetPkScriptByAddress(addressTo, conf.GlobalNetParams)
tokenOrPair := f.Params[1]
tokenAmtStr := f.Params[2]
if len(tokenOrPair) == 4 {
tokenAmt, _ := g.CheckTickVerify(tokenOrPair, tokenAmtStr)
tokenBalanceFrom := moduleInfo.GetUserTokenBalance(tokenOrPair, f.PkScript)
// fixme: Must use the confirmed amount
if tokenBalanceFrom.SwapAccountBalance.Cmp(tokenAmt) < 0 {
log.Printf("token[%s] user[%s], balance %s", tokenOrPair, f.Address, tokenBalanceFrom)
return errors.New("send: token balance insufficient")
}
tokenBalanceTo := moduleInfo.GetUserTokenBalance(tokenOrPair, string(pkScriptTo))
// User Real-time Balance Update
tokenBalanceFrom.SwapAccountBalance = tokenBalanceFrom.SwapAccountBalance.Sub(tokenAmt)
tokenBalanceTo.SwapAccountBalance = tokenBalanceTo.SwapAccountBalance.Add(tokenAmt)
log.Printf("pool send [%s] swappable: %s -> %s", tokenOrPair, tokenBalanceFrom.SwapAccountBalance, tokenBalanceTo.SwapAccountBalance)
} else {
token0, token1, _ := utils.DecodeTokensFromSwapPair(tokenOrPair)
poolPair := GetLowerInnerPairNameByToken(token0, token1)
if _, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPair]; !ok {
return errors.New("send: pool invalid")
}
usersLpBalanceInPool, ok := moduleInfo.LPTokenUsersBalanceMap[poolPair]
if !ok {
return errors.New("send: lps balance map missing pair")
}
// Check whether the lp user's balance storage is consistent (consider storing only one copy)
lpsBalanceFrom, ok := moduleInfo.UsersLPTokenBalanceMap[f.PkScript]
if !ok {
return errors.New("send: users balance map missing user")
}
lpBalanceFrom := lpsBalanceFrom[poolPair]
userbalanceFrom := usersLpBalanceInPool[f.PkScript]
if userbalanceFrom.Cmp(lpBalanceFrom) != 0 {
return errors.New("send: user's tokenLp balance miss match")
}
tokenLpAmt, _ := CheckAmountVerify(tokenAmtStr, 18)
// Check if the user's lp balance is sufficient.
if userbalanceFrom.Cmp(tokenLpAmt) < 0 {
return errors.New(fmt.Sprintf("send: user's tokenLp balance insufficient, %s < %s", userbalanceFrom, tokenLpAmt))
}
if lpBalanceFrom.Cmp(tokenLpAmt) < 0 {
return errors.New(fmt.Sprintf("send: user's tokenLp balance insufficient, %s < %s", lpBalanceFrom, tokenLpAmt))
}
// update from lp balance
usersLpBalanceInPool[f.PkScript] = userbalanceFrom.Sub(tokenLpAmt)
lpsBalanceFrom[poolPair] = lpBalanceFrom.Sub(tokenLpAmt)
// update to lp balance
lpBalanceTo := usersLpBalanceInPool[string(pkScriptTo)]
lpBalanceTo = lpBalanceTo.Add(tokenLpAmt)
usersLpBalanceInPool[string(pkScriptTo)] = lpBalanceTo
// touser-lp-balance
lpsBalanceTo, ok := moduleInfo.UsersLPTokenBalanceMap[string(pkScriptTo)]
if !ok {
lpsBalanceTo = make(map[string]*decimal.Decimal, 0)
moduleInfo.UsersLPTokenBalanceMap[string(pkScriptTo)] = lpsBalanceTo
}
lpsBalanceTo[poolPair] = lpBalanceTo
log.Printf("pool send [%s] lp: %s -> %s", tokenOrPair, lpBalanceFrom, lpBalanceTo)
}
return nil
}

View File

@@ -0,0 +1,153 @@
package indexer
import (
"errors"
"fmt"
"log"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
// ProcessCommitFunctionSwap
// exactIn:
//
// amountInWithFee = amountIn * 997
// amountOut = (amountInWithFee * reserveOut)/(reserveIn * 1000 + amountInWithFee)
//
// exactOut:
//
// amountIn = (reserveIn * amountOut * 1000)/((reserveOut - amountOut) * 997) + 1
func (g *BRC20ModuleIndexer) ProcessCommitFunctionSwap(moduleInfo *model.BRC20ModuleSwapInfo, f *model.SwapFunctionData) error {
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return errors.New("func: swap poolPair invalid")
}
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pool, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPair]
if !ok {
return errors.New("swap: pool invalid")
}
if token0 != pool.Tick[0] && token0 != pool.Tick[1] {
return errors.New("func: swap token invalid")
}
if token1 != pool.Tick[0] && token1 != pool.Tick[1] {
return errors.New("func: swap token invalid")
}
// log.Printf("[%s] pool before swap [%s] %s: %s, %s: %s, lp: %s", moduleInfo.ID, poolPair, pool.Tick[0], pool.TickBalance[0], pool.Tick[1], pool.TickBalance[1], pool.LpBalance)
log.Printf("pool swap params: %v", f.Params)
var tokenIn, tokenInAmtStr, tokenOut, tokenOutAmtStr string
derection := f.Params[3]
if derection == "exactIn" {
tokenIn = f.Params[1]
tokenInAmtStr = f.Params[2]
if tokenIn == token0 {
tokenOut = token1
} else {
tokenOut = token0
}
tokenOutAmtStr = f.Params[4]
} else if derection == "exactOut" {
tokenOut = f.Params[1]
tokenOutAmtStr = f.Params[2]
if tokenOut == token0 {
tokenIn = token1
} else {
tokenIn = token0
}
tokenInAmtStr = f.Params[4]
}
tokenInAmt, _ := g.CheckTickVerify(tokenIn, tokenInAmtStr)
tokenOutAmt, _ := g.CheckTickVerify(tokenOut, tokenOutAmtStr)
// Determine the index order of the two tokens within the pool
var tokenInIdx, tokenOutIdx int
if tokenIn == pool.Tick[0] {
tokenInIdx = 0
tokenOutIdx = 1
} else {
tokenInIdx = 1
tokenOutIdx = 0
}
// Note: integer arithmetic must be used here.
// support exactIn
// Slippage check
// exactIn: 1/(1+slippage) * quoteAmount
// exactOut: (1+slippage) * quoteAmount
slippageAmtStr := f.Params[5]
slippageAmt, _ := decimal.NewDecimalFromString(slippageAmtStr, 3)
feeRateSwapAmt, _ := CheckAmountVerify(moduleInfo.FeeRateSwap, 3)
var amountIn, amountOut *decimal.Decimal
if derection == "exactIn" {
if feeRateSwapAmt.Sign() > 0 {
// with fee
amountInWithFee := tokenInAmt.Mul(decimal.NewDecimal(1000, 3).Sub(feeRateSwapAmt))
amountOut = pool.TickBalance[tokenOutIdx].Mul(amountInWithFee).Div(
pool.TickBalance[tokenInIdx].Mul(decimal.NewDecimal(1000, 3)).Add(amountInWithFee))
} else {
amountOut = pool.TickBalance[tokenOutIdx].Mul(tokenInAmt).Div(
pool.TickBalance[tokenInIdx].Add(tokenInAmt))
}
amountOutMin := tokenOutAmt.Mul(decimal.NewDecimal(1000, 3)).Div(decimal.NewDecimal(1000, 3).Add(slippageAmt))
if amountOut.Cmp(amountOutMin) < 0 {
log.Printf("user[%s], amountOut: %s < expect: %s", f.Address, amountOut, amountOutMin)
return errors.New("swap: slippage error")
}
amountIn = tokenInAmt
} else if derection == "exactOut" {
if feeRateSwapAmt.Sign() > 0 {
// with fee
amountIn = pool.TickBalance[tokenInIdx].Mul(tokenOutAmt.Mul(decimal.NewDecimal(1000, 3))).Div(
pool.TickBalance[tokenOutIdx].Sub(tokenOutAmt).Mul(decimal.NewDecimal(1000, 3).Sub(feeRateSwapAmt))).Add(
decimal.NewDecimal(1, tokenInAmt.Precition))
} else {
amountIn = pool.TickBalance[tokenInIdx].Mul(tokenOutAmt).Div(
pool.TickBalance[tokenOutIdx].Sub(tokenOutAmt)).Add(
decimal.NewDecimal(1, tokenInAmt.Precition))
}
amountInMax := tokenInAmt.Mul(decimal.NewDecimal(1000, 3).Add(slippageAmt))
if amountInMax.Cmp(amountIn) < 0 {
log.Printf("user[%s], amountIn: %s > expect: %s", f.Address, amountIn, amountInMax)
return errors.New("swap: slippage error")
}
amountOut = tokenOutAmt
}
// Check the balance range, prepare to update.
if pool.TickBalance[tokenOutIdx].Cmp(amountOut) < 0 {
return errors.New("swap: pool tokenOut balance insufficient")
}
tokenInBalance := moduleInfo.GetUserTokenBalance(tokenIn, f.PkScript)
tokenOutBalance := moduleInfo.GetUserTokenBalance(tokenOut, f.PkScript)
if tokenInBalance.SwapAccountBalance.Cmp(tokenInAmt) < 0 {
return errors.New(fmt.Sprintf("swap[%s]: user tokenIn balance insufficient: %s < %s",
f.ID,
tokenInBalance.SwapAccountBalance, tokenInAmt))
}
// update balance
// swap sub
pool.TickBalance[tokenOutIdx] = pool.TickBalance[tokenOutIdx].Sub(amountOut)
tokenInBalance.SwapAccountBalance = tokenInBalance.SwapAccountBalance.Sub(amountIn)
// swap add
pool.TickBalance[tokenInIdx] = pool.TickBalance[tokenInIdx].Add(amountIn)
tokenOutBalance.SwapAccountBalance = tokenOutBalance.SwapAccountBalance.Add(amountOut)
// log.Printf("[%s] pool after swap [%s] %s: %s, %s: %s, lp: %s", moduleInfo.ID, poolPair, pool.Tick[0], pool.TickBalance[0], pool.Tick[1], pool.TickBalance[1], pool.LpBalance)
return nil
}
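// A self-contained integer sketch of the two quoting formulas in the doc
// comment above, using the 0.3% fee (997/1000) that the comment assumes; the
// indexer itself reads the fee from the module's FeeRateSwap. Function names
// are hypothetical.
package main

import (
	"fmt"
	"math/big"
)

// getAmountOut: amountOut = (amountIn*997*reserveOut)/(reserveIn*1000 + amountIn*997)
func getAmountOut(amountIn, reserveIn, reserveOut *big.Int) *big.Int {
	inWithFee := new(big.Int).Mul(amountIn, big.NewInt(997))
	num := new(big.Int).Mul(inWithFee, reserveOut)
	den := new(big.Int).Add(new(big.Int).Mul(reserveIn, big.NewInt(1000)), inWithFee)
	return num.Div(num, den)
}

// getAmountIn: amountIn = (reserveIn*amountOut*1000)/((reserveOut-amountOut)*997) + 1
func getAmountIn(amountOut, reserveIn, reserveOut *big.Int) *big.Int {
	num := new(big.Int).Mul(new(big.Int).Mul(reserveIn, amountOut), big.NewInt(1000))
	den := new(big.Int).Mul(new(big.Int).Sub(reserveOut, amountOut), big.NewInt(997))
	return new(big.Int).Add(num.Div(num, den), big.NewInt(1))
}

func main() {
	rIn, rOut := big.NewInt(1_000_000), big.NewInt(4_000_000)
	out := getAmountOut(big.NewInt(10_000), rIn, rOut)
	fmt.Println(out)                         // 39486
	fmt.Println(getAmountIn(out, rIn, rOut)) // 10000 (rounds in the pool's favor)
}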

View File

@@ -0,0 +1,284 @@
package indexer
import (
"bytes"
"container/list"
"encoding/base64"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
"github.com/unisat-wallet/libbrc20-indexer/utils/bip322"
"github.com/btcsuite/btcd/wire"
)
// GetFunctionDataContent builds the content string whose hash is the function ID used for signing.
func GetFunctionDataContent(contentPrefix string, data *model.SwapFunctionData) (content string) {
content = contentPrefix + fmt.Sprintf(`addr: %s
func: %s
params: %s
ts: %d
`, data.Address, data.Function, strings.Join(data.Params, " "), data.Timestamp)
return content
}
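// For reference, a hedged example of the content produced above when the caller passes
// the commit prefix (all values hypothetical; see ProcessInscribeCommitPreVerify for how
// the prefix is assembled). Hashing this content yields the function id, and the BIP-322
// signature is then checked over the same addr/func/params/ts block prefixed with
// "id: <hash>\n":
//
//	module: <module inscription id>
//	parent: <parent commit inscription id>                         (only when present)
//	gas_price: 0.1
//	prevs: <ids of the user's earlier functions in this commit>    (only when present)
//	addr: bc1p... (hypothetical)
//	func: swap
//	params: ordi/sats ordi 10 exactIn 19.7 0.005
//	ts: 1700000000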
func CheckFunctionSigVerify(contentPrefix string, data *model.SwapFunctionData, previous []string) (id string, ok bool) {
if len(previous) != 0 {
contentPrefix += fmt.Sprintf("prevs: %s\n", strings.Join(previous, " "))
}
content := GetFunctionDataContent(contentPrefix, data)
// check id
id = utils.HashString(utils.GetSha256([]byte(content)))
message := GetFunctionDataContent(fmt.Sprintf("id: %s\n", id), data)
signature, err := base64.StdEncoding.DecodeString(data.Signature)
if err != nil {
log.Println("CheckFunctionSigVerify decoding signature:", err)
return id, false
}
var wit wire.TxWitness
lenSignature := len(signature)
if len(signature) == 66 {
wit = wire.TxWitness{signature[2:]}
} else if lenSignature > (2+64+34) && lenSignature <= (2+72+34) {
wit = wire.TxWitness{signature[2 : lenSignature-34], signature[lenSignature-33 : lenSignature]}
} else {
fmt.Println("b64 sig:", hex.EncodeToString(signature))
fmt.Println("pkScript:", hex.EncodeToString([]byte(data.PkScript)))
fmt.Println("b64 sig length invalid")
return id, false
}
// check sig
if ok := bip322.VerifySignature(wit, []byte(data.PkScript), message); !ok {
log.Printf("CheckFunctionSigVerify. content: %s", content)
fmt.Println("sig invalid")
return id, false
}
return id, true
}
// CheckAmountVerify verifies the legality of a brc20 amount string at the given decimal precision.
func CheckAmountVerify(amtStr string, nDecimal uint8) (amt *decimal.Decimal, ok bool) {
// check amount
amt, err := decimal.NewDecimalFromString(amtStr, int(nDecimal))
if err != nil {
return nil, false
}
if amt.Sign() < 0 {
return nil, false
}
return amt, true
}
// CheckTickVerify verifies that the brc20 tick exists and that the amount string is legal for its decimals.
func (g *BRC20ModuleIndexer) CheckTickVerify(tick string, amtStr string) (amt *decimal.Decimal, ok bool) {
uniqueLowerTicker := strings.ToLower(tick)
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
return
}
if amtStr == "" {
return nil, true
}
tinfo := tokenInfo.Deploy
// check amount
amt, err := decimal.NewDecimalFromString(amtStr, int(tinfo.Decimal))
if err != nil {
return nil, false
}
if amt.Sign() < 0 || amt.Cmp(tinfo.Max) > 0 {
return nil, false
}
return amt, true
}
// CheckTickVerifyBigInt verifies that the brc20 tick exists and that an integer amount string is legal when interpreted with the tick's decimals.
func (g *BRC20ModuleIndexer) CheckTickVerifyBigInt(tick string, amtStr string) (amt *decimal.Decimal, ok bool) {
uniqueLowerTicker := strings.ToLower(tick)
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
return
}
if amtStr == "" {
return nil, true
}
tinfo := tokenInfo.Deploy
// check amount
amt, err := decimal.NewDecimalFromString(amtStr, 0)
if err != nil {
return nil, false
}
amt.Precition = uint(tinfo.Decimal)
if amt.Sign() < 0 || amt.Cmp(tinfo.Max) > 0 {
return nil, false
}
return amt, true
}
func GetLowerInnerPairNameByToken(token0, token1 string) (poolPair string) {
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
if token0 > token1 {
poolPair = fmt.Sprintf("%s/%s", token1, token0)
} else {
poolPair = fmt.Sprintf("%s/%s", token0, token1)
}
return poolPair
}
func GetLowerPairNameByToken(token0, token1 string) (poolPair string) {
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
poolPair = fmt.Sprintf("%s/%s", token0, token1)
return poolPair
}
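// Quick usage note (hypothetical ticks): the "inner" pair name is order-insensitive,
// while the plain pair name keeps the caller's order.
//
//	GetLowerInnerPairNameByToken("SATS", "ordi") // "ordi/sats"
//	GetLowerInnerPairNameByToken("ordi", "SATS") // "ordi/sats"
//	GetLowerPairNameByToken("SATS", "ordi")      // "sats/ordi"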
// GetEachItemLengthOfCommitJsonData returns the number of bytes occupied by each object in the top-level "data" array of a commit body.
func GetEachItemLengthOfCommitJsonData(body []byte) (results []uint64, err error) {
decoder := json.NewDecoder(bytes.NewReader(body))
const (
TOKEN_TYPE_OBJ = iota
TOKEN_TYPE_ARR
)
curType := -1
const (
TOKEN_VALUE_MAPKEY = iota
TOKEN_VALUE_MAPVALUE
TOKEN_VALUE_ARRAY_ELEMENT
)
curEle := -1
indentLevel := 0
stack := list.New()
setEleType := func() {
switch curType {
case TOKEN_TYPE_OBJ:
curEle = TOKEN_VALUE_MAPKEY
case TOKEN_TYPE_ARR:
curEle = TOKEN_VALUE_ARRAY_ELEMENT
}
}
readyDataProcess := false
startDataProcess := false
var lastPos uint64
for {
tok, err := decoder.Token()
// Return the next unprocessed token.
if err == io.EOF {
break
} else if err != nil {
return nil, err
}
offset := decoder.InputOffset()
switch tok := tok.(type) {
// Based on the token type, appropriate processing is performed.
case json.Delim:
switch tok {
case '{':
if indentLevel == 2 && readyDataProcess && startDataProcess {
// Step 3: Record start offset at '{' character.
lastPos = uint64(offset)
}
stack.PushBack(TOKEN_TYPE_OBJ)
curType = TOKEN_TYPE_OBJ
setEleType()
indentLevel += 1
case '}':
if indentLevel == 3 && readyDataProcess && startDataProcess {
// Step 4: Record length at '}' character.
results = append(results, uint64(offset)-lastPos+1)
}
stack.Remove(stack.Back())
if stack.Len() > 0 {
curType = stack.Back().Value.(int)
setEleType()
}
indentLevel -= 1
case '[':
if indentLevel == 1 && readyDataProcess && !startDataProcess {
// Step 2: Start formally counting after '['.
results = nil
startDataProcess = true
}
stack.PushBack(TOKEN_TYPE_ARR)
curType = TOKEN_TYPE_ARR
setEleType()
indentLevel += 1
case ']':
if indentLevel == 2 && readyDataProcess && startDataProcess {
// Step 5: End the statistics after ']'.
readyDataProcess = false
startDataProcess = false
}
stack.Remove(stack.Back())
if stack.Len() > 0 {
curType = stack.Back().Value.(int)
setEleType()
}
indentLevel -= 1
}
default:
switch curType {
case TOKEN_TYPE_OBJ:
switch curEle {
case TOKEN_VALUE_MAPKEY:
if indentLevel == 1 {
if tok == "data" {
// Step 1: Mark the data start, and initialize the marker and result variables.
results = nil
readyDataProcess = true
startDataProcess = false
} else {
// Step 6: Mark complete.
readyDataProcess = false
startDataProcess = false
}
}
curEle = TOKEN_VALUE_MAPVALUE
case TOKEN_VALUE_MAPVALUE:
curEle = TOKEN_VALUE_MAPKEY
}
}
}
}
return results, nil
}
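// Hedged usage sketch (the commit body below is a minimal hypothetical example, not a
// real inscription). The helper walks the JSON with json.Decoder tokens and reports the
// byte length of every element of the top-level "data" array; ProcessCommitVerify later
// multiplies each length by the gas price.
func exampleEachItemLength() {
	body := []byte(`{"p":"brc20-swap","op":"commit","data":[{"func":"swap"},{"func":"send"}]}`)
	sizes, err := GetEachItemLengthOfCommitJsonData(body)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(sizes) // one byte count per object in "data", here [15 15]
}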

View File

@@ -0,0 +1,743 @@
package indexer
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"log"
"os"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
var GResultsExternal []*model.SwapFunctionResultCheckState
func InitResultDataFromFile(fname string) (err error) {
// Open our jsonFile
jsonFile, err := os.Open(fname)
// if os.Open returned an error, handle it
if err != nil {
fmt.Println(err)
return err
}
// defer closing of jsonFile so it stays open while we read and parse it
defer jsonFile.Close()
byteValue, err := ioutil.ReadAll(jsonFile)
if err != nil {
return err
}
err = json.Unmarshal([]byte(byteValue), &GResultsExternal)
if err != nil {
return err
}
return nil
}
func (g *BRC20ModuleIndexer) BRC20ModulePrepareSwapCommitContent(
commitsStr []string,
commitsObj []*model.InscriptionBRC20ModuleSwapCommitContent) {
total := len(commitsStr)
if total < 2 {
return
}
for idx, commitStr := range commitsStr[:total-1] {
nextCommitObj := commitsObj[idx+1]
if _, ok := g.InscriptionsValidCommitMapById[nextCommitObj.Parent]; ok {
continue
}
data := &model.InscriptionBRC20Data{
InscriptionId: nextCommitObj.Parent,
ContentBody: []byte(commitStr),
}
g.InscriptionsValidCommitMapById[nextCommitObj.Parent] = data
}
}
func (g *BRC20ModuleIndexer) BRC20ModuleVerifySwapCommitContent(
commitStr string,
commitObj *model.InscriptionBRC20ModuleSwapCommitContent,
results []*model.SwapFunctionResultCheckState) (idx int, critical bool, err error) {
if len(commitObj.Data) != len(results) {
return -1, false, errors.New("commit verify, function results different size")
}
idx, err = g.ProcessInscribeCommitPreVerify(commitObj)
if err != nil {
log.Printf("commit verify failed: inscribe pre function[%d] %s", idx, err)
return idx, true, err
}
// Verify a commit whose inscription has not been moved (settled) yet.
// check module exist
moduleInfo, ok := g.ModulesInfoMap[commitObj.Module]
if !ok {
return -1, true, errors.New("commit, module not exist")
}
parentId := commitObj.Parent
// invalid if parent commit not exist
commitIdsToCheck := []string{}
for parentId != "" {
if _, ok := moduleInfo.CommitIdMap[parentId]; ok {
break
}
commitIdsToCheck = append([]string{parentId}, commitIdsToCheck...)
parentCommitData, ok := g.InscriptionsValidCommitMapById[parentId]
if !ok {
return -1, false, errors.New("commit, parent body missing")
}
parentId, err = GetCommitParentFromData(parentCommitData)
if err != nil {
return -1, true, errors.New("commit, parent json invalid")
}
}
for _, parentId := range commitIdsToCheck {
parentCommitData, ok := g.InscriptionsValidCommitMapById[parentId]
if !ok {
return -1, false, errors.New("commit, parent body not ready")
}
if idx, err := g.ProcessCommitCheck(parentCommitData); err != nil {
return idx, true, err
}
}
// verify current commit
eachFunctionSize, err := GetEachItemLengthOfCommitJsonData([]byte(commitStr))
if err != nil {
return -1, true, errors.New("commit, get function size failed")
}
idx, critical, err = g.ProcessCommitVerify("", commitObj, eachFunctionSize, results)
if err != nil {
log.Printf("commit verify failed, send function[%d] invalid", idx)
return idx, critical, err
}
return 0, false, nil
}
func (g *BRC20ModuleIndexer) BRC20ResultsPreVerify(moduleInfo *model.BRC20ModuleSwapInfo, result *model.SwapFunctionResultCheckState) (err error) {
// check user amt format
for idxUser, user := range result.Users {
userPkScript := constant.ZERO_ADDRESS_PKSCRIPT
// format check
if user.Address != "0" {
if pk, err := utils.GetPkScriptByAddress(user.Address, conf.GlobalNetParams); err != nil {
return errors.New(fmt.Sprintf("result users[%d] addr(%s) invalid", idxUser, user.Address))
} else {
userPkScript = string(pk)
}
}
if len(user.Tick) == 4 {
tokenAmt, ok := g.CheckTickVerify(user.Tick, user.Balance)
if !ok {
return errors.New(fmt.Sprintf("result users[%d] balance invalid", idxUser))
}
// balance check
tokenBalance := moduleInfo.GetUserTokenBalance(user.Tick, userPkScript)
if tokenBalance.SwapAccountBalance.Cmp(tokenAmt) != 0 {
return errors.New(fmt.Sprintf("result users[%d] %s amount not match (%s != %s)",
idxUser, user.Tick,
tokenAmt.String(),
tokenBalance.SwapAccountBalance.String(),
))
}
} else {
token0, token1, err := utils.DecodeTokensFromSwapPair(user.Tick)
if err != nil {
return errors.New(fmt.Sprintf("result users[%d] tick invalid", idxUser))
}
if _, ok := g.CheckTickVerify(token0, ""); !ok {
return errors.New(fmt.Sprintf("result users[%d] tick/0 invalid", idxUser))
}
if _, ok := g.CheckTickVerify(token1, ""); !ok {
return errors.New(fmt.Sprintf("result users[%d] tick/1 invalid", idxUser))
}
lpAmt, ok := CheckAmountVerify(user.Balance, 18)
if !ok {
return errors.New(fmt.Sprintf("result users[%d] Lp Amount invalid", idxUser))
}
// balance check
poolPair := GetLowerInnerPairNameByToken(token0, token1)
usersLpBalanceInPool, ok := moduleInfo.LPTokenUsersBalanceMap[poolPair]
if !ok {
return errors.New(fmt.Sprintf("result users[%d] Pair invalid", idxUser))
}
lpBalance := usersLpBalanceInPool[userPkScript]
if lpBalance.Cmp(lpAmt) != 0 {
return errors.New(fmt.Sprintf("result users[%d] %s lp balance not match", idxUser, poolPair))
}
}
}
// check pool amt format
for idxPool, poolResult := range result.Pools {
// format check
token0, token1, err := utils.DecodeTokensFromSwapPair(poolResult.Pair)
if err != nil {
return errors.New(fmt.Sprintf("result pools[%d] Pair invalid", idxPool))
}
token0Amt, ok := g.CheckTickVerify(token0, poolResult.ReserveAmount0)
if !ok {
return errors.New(fmt.Sprintf("result pools[%d] Amount0 invalid", idxPool))
}
token1Amt, ok := g.CheckTickVerify(token1, poolResult.ReserveAmount1)
if !ok {
return errors.New(fmt.Sprintf("result pools[%d] Amount1 invalid", idxPool))
}
lpAmt, ok := CheckAmountVerify(poolResult.LPAmount, 18)
if !ok {
return errors.New(fmt.Sprintf("result pools[%d] Lp Amount invalid", idxPool))
}
// balance check
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pool, ok := moduleInfo.SwapPoolTotalBalanceDataMap[poolPair]
if !ok {
return errors.New(fmt.Sprintf("result pools[%d] missing pair[%s]", idxPool, poolPair))
}
// Determine the token order id of the pool
var token0Idx, token1Idx int
if token0 == pool.Tick[0] {
token0Idx = 0
token1Idx = 1
} else {
token0Idx = 1
token1Idx = 0
}
if token0Amt.Cmp(pool.TickBalance[token0Idx]) != 0 {
return errors.New(fmt.Sprintf("result pool[%d] %s balance not match", idxPool, pool.Tick[token0Idx]))
}
if token1Amt.Cmp(pool.TickBalance[token1Idx]) != 0 {
return errors.New(fmt.Sprintf("result pool[%d] %s balance not match", idxPool, pool.Tick[token1Idx]))
}
lpAmt.Precition = 18
if lpAmt.Cmp(pool.LpBalance) != 0 {
return errors.New(fmt.Sprintf("result pool[%d] %s lpbalance not match", idxPool, poolPair))
}
}
return nil
}
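// In short, BRC20ResultsPreVerify cross-checks an externally supplied state snapshot
// against the indexer's own books after a function is applied:
//   - a Users entry with a 4-char tick must match that user's SwapAccountBalance;
//   - a Users entry with a "tick0/tick1" pair must match the user's LP balance in that pool;
//   - a Pools entry must match both reserves (matched by tick, not by position) and the
//     pool's total LpBalance.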
// ProcessInscribeCommitPreVerify pre-verifies a newly inscribed commit; the commit has not yet taken effect.
func (g *BRC20ModuleIndexer) ProcessInscribeCommitPreVerify(body *model.InscriptionBRC20ModuleSwapCommitContent) (index int, err error) {
if body.Module != strings.ToLower(body.Module) {
return -1, errors.New("module id invalid")
}
// check module exist
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok {
return -1, errors.New("module invalid")
}
// check gasPrice
if _, ok := g.CheckTickVerify(moduleInfo.GasTick, body.GasPrice); !ok {
log.Printf("ProcessInscribeCommit commit gas err: %s", body.GasPrice)
return -1, errors.New("gas price invalid")
}
// common content
content := fmt.Sprintf("module: %s\n", moduleInfo.ID)
if body.Parent != "" {
content += fmt.Sprintf("parent: %s\n", body.Parent)
}
if body.GasPrice != "" {
content += fmt.Sprintf("gas_price: %s\n", body.GasPrice)
}
// for previous id
functionsByAddressMap := make(map[string][]string)
for idx, f := range body.Data {
if pkScript, err := utils.GetPkScriptByAddress(f.Address, conf.GlobalNetParams); err != nil {
return idx, errors.New("addr invalid")
} else {
f.PkScript = string(pkScript)
}
// log.Printf("ProcessInscribeCommitPreVerify func[%d] %s(%s)", idx, f.Function, strings.Join(f.Params, ", "))
// get previous function ids of this user
previous := functionsByAddressMap[f.Address]
if id, ok := CheckFunctionSigVerify(content, f, previous); !ok {
return idx, errors.New(fmt.Sprintf("function[%d]%s sig invalid", idx, id))
} else {
// update previous id list
previous = append(previous, id)
functionsByAddressMap[f.Address] = previous
}
// function process
if f.Function == constant.BRC20_SWAP_FUNCTION_DEPLOY_POOL {
if len(f.Params) != 2 {
return idx, errors.New("func: deploy params invalid")
}
token0 := f.Params[0]
token1 := f.Params[1]
if token0 == token1 {
return idx, errors.New("func: deploy same tokens")
}
if _, ok := g.InscriptionsTickerInfoMap[strings.ToLower(token0)]; !ok {
return idx, errors.New("func: deploy tick0 invalid")
}
if _, ok := g.InscriptionsTickerInfoMap[strings.ToLower(token1)]; !ok {
return idx, errors.New("func: deploy tick1 invalid")
}
// Check for duplicate pairs when the Commit inscription effect is applied.
} else if f.Function == constant.BRC20_SWAP_FUNCTION_ADD_LIQ {
if len(f.Params) != 5 {
return idx, errors.New("func: addLiq params invalid")
}
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return idx, errors.New("func: addLiq poolPair invalid")
}
token0AmtStr := f.Params[1]
token1AmtStr := f.Params[2]
tokenLpAmtStr := f.Params[3]
slippage := f.Params[4]
if _, ok := g.CheckTickVerify(token0, token0AmtStr); !ok {
return idx, errors.New("func: addLiq amt0 invalid")
}
if _, ok := g.CheckTickVerify(token1, token1AmtStr); !ok {
return idx, errors.New("func: addLiq amt1 invalid")
}
if _, ok := CheckAmountVerify(tokenLpAmtStr, 18); !ok {
return idx, errors.New("func: addLiq amtLp invalid")
}
if _, ok := CheckAmountVerify(slippage, 3); !ok {
return idx, errors.New("func: addLiq slippage invalid")
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_REMOVE_LIQ {
if len(f.Params) != 5 {
return idx, errors.New("func: removeLiq params invalid")
}
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return idx, errors.New("func: removeLiq poolPair invalid")
}
tokenLpAmtStr := f.Params[1]
token0AmtStr := f.Params[2]
token1AmtStr := f.Params[3]
slippage := f.Params[4]
if _, ok := CheckAmountVerify(tokenLpAmtStr, 18); !ok {
return idx, errors.New(fmt.Sprintf("func: removeLiq amtLp invalid, %s", f.Params[1]))
}
if _, ok := g.CheckTickVerify(token0, token0AmtStr); !ok {
return idx, errors.New("func: removeLiq amt0 invalid")
}
if _, ok := g.CheckTickVerify(token1, token1AmtStr); !ok {
return idx, errors.New("func: removeLiq amt1 invalid")
}
if _, ok := CheckAmountVerify(slippage, 3); !ok {
return idx, errors.New("func: removeLiq slippage invalid")
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_SWAP {
if len(f.Params) != 6 {
return idx, errors.New("func: swap params invalid")
}
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return idx, errors.New("func: swap poolPair invalid")
}
// Check that the swapped token is one of the pair's two tokens.
if token := f.Params[1]; token != token0 && token != token1 {
return idx, errors.New("func: swap token invalid")
}
derection := f.Params[3]
if derection != "exactIn" && derection != "exactOut" {
return idx, errors.New("func: swap derection invalid")
}
var tokenIn, tokenInAmtStr, tokenOut, tokenOutAmtStr string
if derection == "exactIn" {
tokenIn = f.Params[1]
tokenInAmtStr = f.Params[2]
if tokenIn == token0 {
tokenOut = token1
} else {
tokenOut = token0
}
tokenOutAmtStr = f.Params[4]
} else if derection == "exactOut" {
tokenOut = f.Params[1]
tokenOutAmtStr = f.Params[2]
if tokenOut == token0 {
tokenIn = token1
} else {
tokenIn = token0
}
tokenInAmtStr = f.Params[4]
}
if _, ok := g.CheckTickVerify(tokenIn, tokenInAmtStr); !ok {
return idx, errors.New("func: swap token amount invalid")
}
if _, ok := g.CheckTickVerify(tokenOut, tokenOutAmtStr); !ok {
return idx, errors.New("func: swap amt1 invalid")
}
slippage := f.Params[5]
if _, ok := CheckAmountVerify(slippage, 3); !ok {
return idx, errors.New("func: swap slippage invalid")
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_DECREASE_APPROVAL {
if len(f.Params) != 2 {
return idx, errors.New("func: decrease approval params invalid")
}
token := f.Params[0]
tokenAmtStr := f.Params[1]
if _, ok := g.CheckTickVerify(token, tokenAmtStr); !ok {
return idx, errors.New("func: decrease approval amt invalid")
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_SEND {
if len(f.Params) != 3 {
return idx, errors.New("func: send params invalid")
}
addressTo := f.Params[0]
if _, err := utils.GetPkScriptByAddress(addressTo, conf.GlobalNetParams); err != nil {
return idx, errors.New("send addr invalid")
}
tokenOrPair := f.Params[1]
tokenAmtStr := f.Params[2]
if len(tokenOrPair) == 4 {
if _, ok := g.CheckTickVerify(tokenOrPair, tokenAmtStr); !ok {
return idx, errors.New("func: send amt invalid")
}
} else {
if _, _, err := utils.DecodeTokensFromSwapPair(tokenOrPair); err != nil {
return idx, errors.New("func: send lp invalid")
}
if _, ok := CheckAmountVerify(tokenAmtStr, 18); !ok {
return idx, errors.New(fmt.Sprintf("func: send amtLp invalid, %s", tokenAmtStr))
}
}
} else {
log.Printf("ProcessInscribeCommit commit[%d] invalid function: %s. id: %s", idx, f.Function, f.ID)
return idx, errors.New("func invalid")
}
}
return 0, nil
}
func (g *BRC20ModuleIndexer) ProcessCommitVerify(commitId string, body *model.InscriptionBRC20ModuleSwapCommitContent,
eachFunctionSize []uint64, results []*model.SwapFunctionResultCheckState) (index int, critical bool, err error) {
// check module exist
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok {
return -1, true, errors.New("commit, module not exist")
}
// check empty parent
if body.Parent == "" {
if len(moduleInfo.CommitIdMap) > 0 {
return -1, true, errors.New("commit, missing parent")
}
} else {
// invalid if reusing 'parent'
if _, ok := moduleInfo.CommitIdChainMap[body.Parent]; ok {
return -1, true, errors.New("commit, parent already sattled")
}
// invalid if parent commit not exist
if _, ok := moduleInfo.CommitIdMap[body.Parent]; !ok {
return -1, true, errors.New("commit, parent invalid")
}
}
gasPriceAmt, _ := g.CheckTickVerify(moduleInfo.GasTick, body.GasPrice)
if len(body.Data) != len(eachFunctionSize) {
return -1, true, errors.New("commit, function size not match data")
}
for idx, f := range body.Data {
if pkScript, err := utils.GetPkScriptByAddress(f.Address, conf.GlobalNetParams); err != nil {
return idx, true, errors.New("commit, addr invalid")
} else {
f.PkScript = string(pkScript)
}
// gas fee
if gasPriceAmt.Sign() > 0 {
size := eachFunctionSize[idx]
gasAmt := gasPriceAmt.Mul(decimal.NewDecimal(size, 3))
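// Hedged worked example (hypothetical values): with gas_price = "0.1" and a function
// object occupying 150 bytes of the commit's "data" array, gasAmt = 0.1 * NewDecimal(150, 3).
// If NewDecimal(150, 3) represents 0.150, gas is effectively charged per 1000 bytes of
// commit data and the user pays 0.015 of the module's gas tick; if it represents the raw
// byte count, the charge is per byte. The exact scaling is defined by the decimal
// package, so treat these numbers as an assumption.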
// log.Printf("process commit[%d] size: %d, gas fee: %s, module[%s]", idx, size, gasAmt.String(), body.Module)
if err := g.ProcessCommitFunctionGasFee(moduleInfo, f.PkScript, gasAmt); err != nil { // has update
log.Printf("process commit[%d] gas failed: %s", idx, err)
return idx, true, err
}
}
// functions
if f.Function == constant.BRC20_SWAP_FUNCTION_DEPLOY_POOL {
if err := g.ProcessCommitFunctionDeployPool(moduleInfo, f); err != nil {
log.Printf("process commit[%d] deploy pool failed: %s, module[%s]", idx, err, body.Module)
return idx, true, err
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_ADD_LIQ {
if err := g.ProcessCommitFunctionAddLiquidity(moduleInfo, f); err != nil {
log.Printf("process commit[%d] add liq failed: %s, module[%s]", idx, err, body.Module)
return idx, true, err
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_REMOVE_LIQ {
if err := g.ProcessCommitFunctionRemoveLiquidity(moduleInfo, f); err != nil {
log.Printf("process commit[%d] remove liq failed: %s, module[%s]", idx, err, body.Module)
return idx, true, err
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_SWAP {
if err := g.ProcessCommitFunctionSwap(moduleInfo, f); err != nil {
log.Printf("process commit[%d] swap failed: %s, module[%s]", idx, err, body.Module)
return idx, true, err
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_DECREASE_APPROVAL {
if err := g.ProcessCommitFunctionDecreaseApproval(moduleInfo, f); err != nil {
log.Printf("process commit[%d] decrease approval failed: %s, module[%s]", idx, err, body.Module)
return idx, true, err
}
} else if f.Function == constant.BRC20_SWAP_FUNCTION_SEND {
if err := g.ProcessCommitFunctionSend(moduleInfo, f); err != nil {
log.Printf("process commit[%d] send failed: %s, module[%s]", idx, err, body.Module)
return idx, true, err
}
}
// instant verify
if len(results) == len(body.Data) {
if err = g.BRC20ResultsPreVerify(moduleInfo, results[idx]); err != nil {
log.Printf("commit verify failed: result[%d] %s", idx, err)
return idx, false, err
}
}
// verify test result
if GResultsExternal == nil {
continue
}
for _, result := range GResultsExternal {
if result.CommitId != commitId {
continue
}
if result.FunctionIdx != idx {
continue
}
if err = g.BRC20ResultsPreVerify(moduleInfo, result); err != nil {
log.Printf("commit verify failed: result[%d] %s", idx, err)
return idx, false, err
}
}
}
return 0, false, nil
}
func (g *BRC20ModuleIndexer) InitCherryPickFilter(body *model.InscriptionBRC20ModuleSwapCommitContent, pickUsersPkScript, pickTokensTick, pickPoolsPair map[string]bool) (index int, err error) {
// check module exist
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok {
return -1, errors.New("module invalid")
}
pickUsersPkScript[string(moduleInfo.GasToPkScript)] = true
pickUsersPkScript[string(moduleInfo.LpFeePkScript)] = true
pickUsersPkScript[string(moduleInfo.SequencerPkScript)] = true
pickUsersPkScript[string(moduleInfo.DeployerPkScript)] = true
pickTokensTick[moduleInfo.GasTick] = true
for idx, f := range body.Data {
if pkScript, err := utils.GetPkScriptByAddress(f.Address, conf.GlobalNetParams); err != nil {
return idx, errors.New("addr invalid")
} else {
pickUsersPkScript[string(pkScript)] = true
}
// function process
if f.Function == constant.BRC20_SWAP_FUNCTION_DEPLOY_POOL {
if len(f.Params) != 2 {
return idx, errors.New("func: deploy params invalid")
}
token0 := f.Params[0]
token1 := f.Params[1]
// pair
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pickPoolsPair[poolPair] = true
// tick
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
pickTokensTick[token0] = true
pickTokensTick[token1] = true
} else if f.Function == constant.BRC20_SWAP_FUNCTION_ADD_LIQ {
if len(f.Params) != 5 {
return idx, errors.New("func: addLiq params invalid")
}
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return idx, errors.New("func: addLiq poolPair invalid")
}
// pair
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pickPoolsPair[poolPair] = true
// tick
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
pickTokensTick[token0] = true
pickTokensTick[token1] = true
} else if f.Function == constant.BRC20_SWAP_FUNCTION_REMOVE_LIQ {
if len(f.Params) != 5 {
return idx, errors.New("func: removeLiq params invalid")
}
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return idx, errors.New("func: removeLiq poolPair invalid")
}
// pair
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pickPoolsPair[poolPair] = true
// tick
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
pickTokensTick[token0] = true
pickTokensTick[token1] = true
} else if f.Function == constant.BRC20_SWAP_FUNCTION_SWAP {
if len(f.Params) != 6 {
return idx, errors.New("func: swap params invalid")
}
token0, token1, err := utils.DecodeTokensFromSwapPair(f.Params[0])
if err != nil {
return idx, errors.New("func: swap poolPair invalid")
}
// pair
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pickPoolsPair[poolPair] = true
// tick
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
pickTokensTick[token0] = true
pickTokensTick[token1] = true
} else if f.Function == constant.BRC20_SWAP_FUNCTION_DECREASE_APPROVAL {
if len(f.Params) != 2 {
return idx, errors.New("func: decrease approval params invalid")
}
token0 := f.Params[0]
token0 = strings.ToLower(token0)
pickTokensTick[token0] = true
} else if f.Function == constant.BRC20_SWAP_FUNCTION_SEND {
if len(f.Params) != 3 {
return idx, errors.New("func: send params invalid")
}
addressTo := f.Params[0]
if pk, err := utils.GetPkScriptByAddress(addressTo, conf.GlobalNetParams); err != nil {
return idx, errors.New("send addr invalid")
} else {
pickUsersPkScript[string(pk)] = true
}
tokenOrPair := f.Params[1]
if len(tokenOrPair) == 4 {
token0 := strings.ToLower(tokenOrPair)
pickTokensTick[token0] = true
} else {
if token0, token1, err := utils.DecodeTokensFromSwapPair(tokenOrPair); err != nil {
return idx, errors.New("func: send lp invalid")
} else {
poolPair := GetLowerInnerPairNameByToken(token0, token1)
pickPoolsPair[poolPair] = true
// tick
token0 = strings.ToLower(token0)
token1 = strings.ToLower(token1)
pickTokensTick[token0] = true
pickTokensTick[token1] = true
}
}
} else {
log.Printf("ProcessInscribeCommit commit[%d] invalid function: %s. id: %s", idx, f.Function, f.ID)
return idx, errors.New("func invalid")
}
}
return 0, nil
}

View File

@@ -0,0 +1,316 @@
package indexer
import (
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) GetConditionalApproveInfoByKey(createIdxKey string) (
approveInfo *model.InscriptionBRC20SwapConditionalApproveInfo, isInvalid bool) {
var ok bool
// approve
approveInfo, ok = g.InscriptionsValidConditionalApproveMap[createIdxKey]
if !ok {
approveInfo, ok = g.InscriptionsInvalidConditionalApproveMap[createIdxKey]
if !ok {
approveInfo = nil
}
isInvalid = true
}
return approveInfo, isInvalid
}
func (g *BRC20ModuleIndexer) ProcessConditionalApprove(data *model.InscriptionBRC20Data, approveInfo *model.InscriptionBRC20SwapConditionalApproveInfo, isInvalid bool) error {
inscriptionId := approveInfo.Data.GetInscriptionId()
log.Printf("parse move approve. inscription id: %s", inscriptionId)
// ticker
uniqueLowerTicker := strings.ToLower(approveInfo.Tick)
if _, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]; !ok {
return errors.New("approve, invalid ticker")
}
moduleInfo, ok := g.ModulesInfoMap[approveInfo.Module]
if !ok {
log.Printf("ProcessBRC20ConditionalApprove send approve, but module invalid. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return errors.New("approve, module invalid")
}
// global invalid history
if isInvalid {
// global history
history := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_CONDITIONAL_APPROVE, approveInfo.Data, data, nil, !isInvalid)
moduleInfo.History = append(moduleInfo.History, history)
return nil
}
var amt *decimal.Decimal
var events []*model.ConditionalApproveEvent
// First move:
//   If sent back to the sender, or if the inscription sat is spent as fee, the whole balance is refunded to the user's swap balance and the inscription is used up and becomes void.
//   If sent to another party, the current receiver is recorded as a delegator (agent); if that address can never be unlocked, the balance is stuck and the loss is borne by the user.
// Subsequent moves:
//   If the sending and receiving addresses differ, or if the inscription sat is spent as fee, the whole balance is refunded to the owner's swap balance and the inscription is used up and becomes void.
//   If the sending and receiving addresses are the same, transfer scanning is carried out.
//
// Within a single transaction it is possible to deposit repeatedly into different instances of the same kind of module, or into different modules.
// Although this allows duplicate deposits, it keeps module instances completely independent of each other.
senderPkScript := string(approveInfo.Data.PkScript)
receiverPkScript := string(data.PkScript)
if !approveInfo.HasMoved {
approveInfo.HasMoved = true
if data.Satoshi == 0 || senderPkScript == receiverPkScript {
receiverPkScript = senderPkScript
amt = approveInfo.Amount
approveInfo.Balance = nil
log.Printf("generate new approve event return self with out move, id: %s", inscriptionId)
log.Printf("generate new approve event amt: %s", amt.String())
// Returned directly from the start
event := model.NewConditionalApproveEvent(senderPkScript, receiverPkScript, amt, approveInfo.Balance, data, approveInfo, "", "")
events = append(events, event)
} else if senderPkScript != receiverPkScript {
approveInfo.DelegatorPkScript = receiverPkScript
return nil
} // no else
} else {
senderPkScript = approveInfo.DelegatorPkScript
if data.Satoshi == 0 || senderPkScript != receiverPkScript {
senderPkScript = approveInfo.OwnerPkScript
receiverPkScript = senderPkScript
amt = approveInfo.Balance
approveInfo.Balance = nil
log.Printf("generate new approve event return self after move, id: %s", inscriptionId)
log.Printf("generate new approve event amt: %s", amt.String())
// Subsequent direct return
event := model.NewConditionalApproveEvent(senderPkScript, receiverPkScript, amt, approveInfo.Balance, data, approveInfo, "", "")
events = append(events, event)
} else if senderPkScript == receiverPkScript {
if g.ThisTxId != data.TxId {
g.TxStaticTransferStatesForConditionalApprove = nil
g.ThisTxId = data.TxId
}
events = g.GenerateApproveEventsByApprove(approveInfo.OwnerPkScript, approveInfo.Balance,
data, approveInfo)
} // no else
}
return g.ProcessConditionalApproveEvents(events)
}
func (g *BRC20ModuleIndexer) ProcessConditionalApproveEvents(events []*model.ConditionalApproveEvent) error {
// produce conditional approve events
for _, event := range events {
inscriptionId := event.FromData.GetInscriptionId()
// from address
addressFrom, err := utils.GetAddressFromScript([]byte(event.From), conf.GlobalNetParams)
if err != nil {
addressFrom = hex.EncodeToString([]byte(event.From))
}
// to address
addressTo, err := utils.GetAddressFromScript([]byte(event.To), conf.GlobalNetParams)
if err != nil {
addressTo = hex.EncodeToString([]byte(event.To))
}
log.Printf("process approve event. inscription id: %s, from: %s, to: %s, amt: %s, balance: %s",
inscriptionId,
addressFrom,
addressTo,
event.Amount.String(),
event.Balance.String())
// ticker
uniqueLowerTicker := strings.ToLower(event.Tick)
if _, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]; !ok {
return errors.New("approve event, invalid ticker")
}
moduleInfo, ok := g.ModulesInfoMap[event.Module]
if !ok {
return errors.New("approve event, module invalid")
}
// global history
data := &model.BRC20SwapHistoryCondApproveData{
Tick: event.Tick,
Amount: event.Amount.String(),
Balance: event.Balance.String(),
TransferInscriptionId: event.TransferInscriptionId,
TransferMax: event.TransferMax,
}
history := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_CONDITIONAL_APPROVE, &event.FromData, &event.ToData, data, true)
moduleInfo.History = append(moduleInfo.History, history)
// from
// get user's tokens to update
fromUserTokens, ok := moduleInfo.UsersTokenBalanceDataMap[string(event.From)]
if !ok {
log.Printf("ProcessBRC20ConditionalApprove send from user missing. height: %d, txidx: %d",
event.ToData.Height,
event.ToData.TxIdx,
)
return errors.New("approve, send from user missing")
}
// get tokenBalance to update
fromTokenBalance, ok := fromUserTokens[uniqueLowerTicker]
if !ok {
log.Printf("ProcessBRC20ConditionalApprove send from ticker missing. height: %d, txidx: %d",
event.ToData.Height,
event.ToData.TxIdx,
)
return errors.New("approve, send from ticker missing")
}
// Cross-check whether the approve inscription exists.
if _, ok := fromTokenBalance.ValidConditionalApproveMap[event.ToData.CreateIdxKey]; !ok {
log.Printf("ProcessBRC20ConditionalApprove send from approve missing(dup approve?). height: %d, txidx: %d",
event.ToData.Height,
event.ToData.TxIdx,
)
return errors.New("approve, send from approve missing(dup)")
}
// to
tokenBalance := moduleInfo.GetUserTokenBalance(event.Tick, event.To)
// set from
fromTokenBalance.CondApproveableBalance = fromTokenBalance.CondApproveableBalance.Sub(event.Amount)
// delete(fromTokenBalance.ValidConditionalApproveMap, data.CreateIdxKey)
// fixme: history.Data
fromHistory := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_APPROVE_FROM, &event.FromData, &event.ToData, nil, true)
fromTokenBalance.History = append(fromTokenBalance.History, fromHistory)
// set to
if event.ToData.BlockTime > 0 {
tokenBalance.SwapAccountBalanceSafe = tokenBalance.SwapAccountBalanceSafe.Add(event.Amount)
}
tokenBalance.SwapAccountBalance = tokenBalance.SwapAccountBalance.Add(event.Amount)
// fixme: history.Data
toHistory := model.NewBRC20ModuleHistory(true, constant.BRC20_HISTORY_SWAP_TYPE_N_APPROVE_TO, &event.FromData, &event.ToData, nil, true)
tokenBalance.History = append(tokenBalance.History, toHistory)
// record state
stateBalance := moduleInfo.GetTickConditionalApproveStateBalance(event.Tick)
if event.From == event.To {
stateBalance.BalanceCancelApprove = stateBalance.BalanceCancelApprove.Add(event.Amount)
} else {
stateBalance.BalanceApprove = stateBalance.BalanceApprove.Add(event.Amount)
}
}
for _, event := range events {
event.ApproveInfo.Balance = event.Balance
}
return nil
}
func (g *BRC20ModuleIndexer) ProcessInscribeConditionalApprove(data *model.InscriptionBRC20Data) error {
var body model.InscriptionBRC20ModuleSwapApproveContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
log.Printf("parse approve json failed. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return err
}
// lower case only
if body.Module != strings.ToLower(body.Module) {
return errors.New("module id invalid")
}
moduleInfo, ok := g.ModulesInfoMap[body.Module]
if !ok { // invalid module
return errors.New("module invalid")
}
if len(body.Tick) != 4 {
return errors.New("tick invalid")
}
uniqueLowerTicker := strings.ToLower(body.Tick)
tokenInfo, ok := g.InscriptionsTickerInfoMap[uniqueLowerTicker]
if !ok {
return errors.New("tick not exist")
}
tinfo := tokenInfo.Deploy
// check amount
amt, err := decimal.NewDecimalFromString(body.Amount, int(tinfo.Decimal))
if err != nil {
return errors.New(fmt.Sprintf("cond approve amount invalid: %s", body.Amount))
}
if amt.Sign() <= 0 || amt.Cmp(tinfo.Max) > 0 {
return errors.New("amount out of range")
}
balanceCondApprove := decimal.NewDecimalCopy(amt)
body.Tick = tokenInfo.Ticker
condApproveInfo := &model.InscriptionBRC20SwapConditionalApproveInfo{
Data: data,
}
condApproveInfo.Module = body.Module
condApproveInfo.Tick = tokenInfo.Ticker
condApproveInfo.Amount = balanceCondApprove
condApproveInfo.Balance = decimal.NewDecimalCopy(balanceCondApprove)
condApproveInfo.OwnerPkScript = data.PkScript
// global history
historyData := &model.BRC20SwapHistoryCondApproveData{
Tick: condApproveInfo.Tick,
Amount: condApproveInfo.Amount.String(),
Balance: condApproveInfo.Balance.String(),
}
history := model.NewBRC20ModuleHistory(false, constant.BRC20_HISTORY_SWAP_TYPE_N_INSCRIBE_CONDITIONAL_APPROVE, data, data, historyData, true)
moduleInfo.History = append(moduleInfo.History, history)
moduleTokenBalance := moduleInfo.GetUserTokenBalance(condApproveInfo.Tick, data.PkScript)
if moduleTokenBalance.AvailableBalance.Cmp(balanceCondApprove) < 0 { // invalid
history.Valid = false
g.InscriptionsInvalidConditionalApproveMap[data.CreateIdxKey] = condApproveInfo
} else {
history.Valid = true
// The available balance here is deducted directly and transferred to CondApproveableBalance.
moduleTokenBalance.AvailableBalanceSafe = moduleTokenBalance.AvailableBalanceSafe.Sub(balanceCondApprove)
moduleTokenBalance.AvailableBalance = moduleTokenBalance.AvailableBalance.Sub(balanceCondApprove)
moduleTokenBalance.CondApproveableBalance = moduleTokenBalance.CondApproveableBalance.Add(balanceCondApprove)
// Update personal approve lookup table ValidApproveMap
if moduleTokenBalance.ValidConditionalApproveMap == nil {
moduleTokenBalance.ValidConditionalApproveMap = make(map[string]*model.InscriptionBRC20Data, 1)
}
moduleTokenBalance.ValidConditionalApproveMap[data.CreateIdxKey] = data
// Update global approve lookup table
g.InscriptionsValidConditionalApproveMap[data.CreateIdxKey] = condApproveInfo
// g.InscriptionsValidBRC20DataMap[data.CreateIdxKey] = condApproveInfo.Data // fixme
// record state
stateBalance := moduleInfo.GetTickConditionalApproveStateBalance(condApproveInfo.Tick)
stateBalance.BalanceNewApprove = stateBalance.BalanceNewApprove.Add(balanceCondApprove)
}
return nil
}

142
indexer/module_create.go Normal file
View File

@@ -0,0 +1,142 @@
package indexer
import (
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"log"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) ProcessCreateModule(data *model.InscriptionBRC20Data) error {
var body model.InscriptionBRC20ModuleDeploySwapContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
log.Printf("parse create module json failed. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return err
}
if conf.MODULE_SWAP_SOURCE_INSCRIPTION_ID != body.Source {
return errors.New(fmt.Sprintf("source not match: %s", body.Source))
}
inscriptionId := data.GetInscriptionId()
log.Printf("create module: %s", inscriptionId)
if _, ok := g.ModulesInfoMap[inscriptionId]; ok {
return errors.New("dup module deploy") // impossible
}
// feeRateSwap
feeRateSwap, ok := body.Init["swap_fee_rate"]
if !ok {
feeRateSwap = "0"
}
if _, ok := CheckAmountVerify(feeRateSwap, 3); !ok {
return errors.New("swap fee invalid")
}
// gasTick
gasTick, ok := body.Init["gas_tick"]
if !ok {
// gas is not optional
return errors.New("gas_tick missing")
}
if _, ok := g.CheckTickVerify(gasTick, ""); !ok {
log.Printf("create module gas tick[%s] invalid", gasTick)
return errors.New("gas_tick invalid")
}
// sequencer default
sequencerPkScript := data.PkScript
if sequencer, ok := body.Init["sequencer"]; ok {
if pk, err := utils.GetPkScriptByAddress(sequencer, conf.GlobalNetParams); err != nil {
return errors.New("sequencer invalid")
} else {
sequencerPkScript = string(pk)
}
} else {
return errors.New("sequencer missing")
}
// gasTo default
gasToPkScript := data.PkScript
if gasTo, ok := body.Init["gas_to"]; ok {
if pk, err := utils.GetPkScriptByAddress(gasTo, conf.GlobalNetParams); err != nil {
return errors.New("gasTo invalid")
} else {
gasToPkScript = string(pk)
}
} else {
return errors.New("gas_to missing")
}
// lpFeeTo default
lpFeeToPkScript := data.PkScript
if lpFeeTo, ok := body.Init["fee_to"]; ok {
if pk, err := utils.GetPkScriptByAddress(lpFeeTo, conf.GlobalNetParams); err != nil {
return errors.New("lpFeeTo invalid")
} else {
lpFeeToPkScript = string(pk)
}
} else {
return errors.New("fee_to missing")
}
m := &model.BRC20ModuleSwapInfo{
ID: inscriptionId,
Name: body.Name,
DeployerPkScript: data.PkScript, // deployer
SequencerPkScript: sequencerPkScript, // Sequencer
GasToPkScript: gasToPkScript,
LpFeePkScript: lpFeeToPkScript,
FeeRateSwap: feeRateSwap,
GasTick: gasTick,
History: make([]*model.BRC20ModuleHistory, 0),
// runtime for commit
CommitInvalidMap: make(map[string]struct{}, 0),
CommitIdChainMap: make(map[string]struct{}, 0),
CommitIdMap: make(map[string]struct{}, 0),
// runtime for holders
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap: make(map[string]map[string]*model.BRC20ModuleTokenBalance, 0),
// token balance of address in module [tick][address]balanceData
TokenUsersBalanceDataMap: make(map[string]map[string]*model.BRC20ModuleTokenBalance, 0),
// swap
// lp token balance of address in module [pool][address]balance
LPTokenUsersBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// lp token of users in module [moduleid][address][pool]balance
UsersLPTokenBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// swap total balance
// total balance of pool in module [pool]balanceData
SwapPoolTotalBalanceDataMap: make(map[string]*model.BRC20ModulePoolTotalBalance, 0),
ConditionalApproveStateBalanceDataMap: make(map[string]*model.BRC20ModuleConditionalApproveStateBalance, 0),
}
// deployInfo := model.NewInscriptionBRC20SwapInfo(data)
// deployInfo.Module = inscriptionId
history := model.NewBRC20ModuleHistory(false, constant.BRC20_HISTORY_MODULE_TYPE_N_INSCRIBE_MODULE, data, data, nil, true)
m.History = append(m.History, history)
g.ModulesInfoMap[inscriptionId] = m
return nil
}
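// For reference, the init keys consumed above (other fields of the deploy body are
// defined by model.InscriptionBRC20ModuleDeploySwapContent and not repeated here):
//
//	"swap_fee_rate" : optional, up to 3 decimals, defaults to "0"
//	"gas_tick"      : required, must be an already deployed brc-20 tick
//	"sequencer"     : required address
//	"gas_to"        : required address
//	"fee_to"        : required address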

View File

@@ -0,0 +1,34 @@
package indexer
import (
"encoding/hex"
"encoding/json"
"errors"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func (g *BRC20ModuleIndexer) ProcessInscribeWithdraw(data *model.InscriptionBRC20Data) error {
var body model.InscriptionBRC20ModuleWithdrawContent
if err := json.Unmarshal(data.ContentBody, &body); err != nil {
log.Printf("parse module withdraw json failed. txid: %s",
hex.EncodeToString(utils.ReverseBytes([]byte(data.TxId))),
)
return err
}
// lower case only
if body.Module != strings.ToLower(body.Module) {
return errors.New("module id invalid")
}
if _, ok := g.ModulesInfoMap[body.Module]; !ok { // invalid module
return errors.New("module invalid")
}
// black module
return nil
}

357
indexer/store.go Normal file
View File

@@ -0,0 +1,357 @@
package indexer
import (
"encoding/gob"
"log"
"os"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
)
type BRC20ModuleIndexerStore struct {
BestHeight uint32
EnableHistory bool
HistoryCount uint32
FirstHistoryByHeight map[uint32]uint32
LastHistoryHeight uint32
// brc20 base
AllHistory []uint32
UserAllHistory map[string]*model.BRC20UserHistory
InscriptionsTickerInfoMap map[string]*model.BRC20TokenInfo
UserTokensBalanceData map[string]map[string]*model.BRC20TokenBalance
InscriptionsValidBRC20DataMap map[string]*model.InscriptionBRC20InfoResp
// inner valid transfer
InscriptionsValidTransferMap map[string]*model.InscriptionBRC20TickInfo
// inner invalid transfer
InscriptionsInvalidTransferMap map[string]*model.InscriptionBRC20TickInfo
// module
// all modules info
ModulesInfoMap map[string]*model.BRC20ModuleSwapInfoStore
// module of users [address]moduleid
UsersModuleWithTokenMap map[string]string
// module lp of users [address]moduleid
UsersModuleWithLpTokenMap map[string]string
// runtime for approve
InscriptionsValidApproveMap map[string]*model.InscriptionBRC20SwapInfo // inner valid approve
InscriptionsInvalidApproveMap map[string]*model.InscriptionBRC20SwapInfo
// runtime for conditional approve
InscriptionsValidConditionalApproveMap map[string]*model.InscriptionBRC20SwapConditionalApproveInfo
InscriptionsInvalidConditionalApproveMap map[string]*model.InscriptionBRC20SwapConditionalApproveInfo
// runtime for commit
InscriptionsValidCommitMap map[string]*model.InscriptionBRC20Data // inner valid commit by key
InscriptionsInvalidCommitMap map[string]*model.InscriptionBRC20Data
}
func (g *BRC20ModuleIndexer) Load(fname string) {
log.Printf("loading brc20 ...")
gobFile, err := os.Open(fname)
if err != nil {
log.Printf("open brc20 file failed: %s", err)
return
}
gob.Register(model.BRC20SwapHistoryApproveData{})
gob.Register(model.BRC20SwapHistoryCondApproveData{})
gobDec := gob.NewDecoder(gobFile)
store := &BRC20ModuleIndexerStore{}
if err := gobDec.Decode(&store); err != nil {
log.Printf("load store failed: %s", err)
return
}
g.LoadStore(store)
log.Printf("load brc20 ok")
}
func (g *BRC20ModuleIndexer) Save(fname string) {
log.Printf("saving brc20 ...")
gobFile, err := os.OpenFile(fname, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
log.Printf("open brc20 file failed: %s", err)
return
}
defer gobFile.Close()
gob.Register(model.BRC20SwapHistoryApproveData{})
gob.Register(model.BRC20SwapHistoryCondApproveData{})
enc := gob.NewEncoder(gobFile)
if err := enc.Encode(g.GetStore()); err != nil {
log.Printf("save store failed: %s", err)
return
}
log.Printf("save brc20 ok")
}
func (g *BRC20ModuleIndexer) LoadHistory(fname string) {
log.Printf("loading brc20 history...")
gobFile, err := os.Open(fname)
if err != nil {
log.Printf("open brc20 history file failed: %s", err)
return
}
gobDec := gob.NewDecoder(gobFile)
for {
var h []byte
if err := gobDec.Decode(&h); err != nil {
log.Printf("load history data end: %s", err)
break
}
g.HistoryData = append(g.HistoryData, h)
}
log.Printf("load brc20 history ok: %d", len(g.HistoryData))
}
func (g *BRC20ModuleIndexer) SaveHistory(fname string) {
log.Printf("saving brc20 history...")
gobFile, err := os.OpenFile(fname, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
log.Printf("open brc20 history file failed: %s", err)
return
}
defer gobFile.Close()
enc := gob.NewEncoder(gobFile)
for _, h := range g.HistoryData {
if err := enc.Encode(h); err != nil {
log.Printf("save history data failed: %s", err)
return
}
}
log.Printf("save brc20 history ok")
}
func (g *BRC20ModuleIndexer) GetStore() (store *BRC20ModuleIndexerStore) {
store = &BRC20ModuleIndexerStore{
BestHeight: g.BestHeight,
EnableHistory: g.EnableHistory,
HistoryCount: g.HistoryCount,
FirstHistoryByHeight: g.FirstHistoryByHeight,
LastHistoryHeight: g.LastHistoryHeight,
// brc20 base
AllHistory: g.AllHistory,
UserAllHistory: g.UserAllHistory,
InscriptionsTickerInfoMap: g.InscriptionsTickerInfoMap,
UserTokensBalanceData: g.UserTokensBalanceData,
InscriptionsValidBRC20DataMap: g.InscriptionsValidBRC20DataMap,
// inner valid transfer
InscriptionsValidTransferMap: g.InscriptionsValidTransferMap,
// inner invalid transfer
InscriptionsInvalidTransferMap: g.InscriptionsInvalidTransferMap,
// module
// all modules info
// module of users [address]moduleid
UsersModuleWithTokenMap: g.UsersModuleWithTokenMap,
// module lp of users [address]moduleid
UsersModuleWithLpTokenMap: g.UsersModuleWithLpTokenMap,
// runtime for approve
InscriptionsValidApproveMap: g.InscriptionsValidApproveMap,
InscriptionsInvalidApproveMap: g.InscriptionsInvalidApproveMap,
// runtime for conditional approve
InscriptionsValidConditionalApproveMap: g.InscriptionsValidConditionalApproveMap,
InscriptionsInvalidConditionalApproveMap: g.InscriptionsInvalidConditionalApproveMap,
// runtime for commit
InscriptionsValidCommitMap: g.InscriptionsValidCommitMap,
InscriptionsInvalidCommitMap: g.InscriptionsInvalidCommitMap,
}
store.ModulesInfoMap = make(map[string]*model.BRC20ModuleSwapInfoStore)
for module, info := range g.ModulesInfoMap {
infoStore := &model.BRC20ModuleSwapInfoStore{
ID: info.ID,
Name: info.Name,
DeployerPkScript: info.DeployerPkScript,
SequencerPkScript: info.SequencerPkScript,
GasToPkScript: info.GasToPkScript,
LpFeePkScript: info.LpFeePkScript,
FeeRateSwap: info.FeeRateSwap,
GasTick: info.GasTick,
History: info.History, // fixme
// runtime for commit
CommitInvalidMap: info.CommitInvalidMap,
CommitIdMap: info.CommitIdMap,
CommitIdChainMap: info.CommitIdChainMap,
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap: info.UsersTokenBalanceDataMap,
// swap
// lp token balance of address in module [pool][address]balance
LPTokenUsersBalanceMap: info.LPTokenUsersBalanceMap,
// swap total balance
// total balance of pool in module [pool]balanceData
SwapPoolTotalBalanceDataMap: info.SwapPoolTotalBalanceDataMap,
// module deposit/withdraw state [tick]balanceData
ConditionalApproveStateBalanceDataMap: info.ConditionalApproveStateBalanceDataMap,
}
store.ModulesInfoMap[module] = infoStore
}
return store
}
func (g *BRC20ModuleIndexer) LoadStore(store *BRC20ModuleIndexerStore) {
g.BestHeight = store.BestHeight
g.EnableHistory = store.EnableHistory
g.HistoryCount = store.HistoryCount
g.FirstHistoryByHeight = store.FirstHistoryByHeight
g.LastHistoryHeight = store.LastHistoryHeight
// brc20 base
g.AllHistory = store.AllHistory
g.UserAllHistory = store.UserAllHistory
g.InscriptionsTickerInfoMap = store.InscriptionsTickerInfoMap
g.UserTokensBalanceData = store.UserTokensBalanceData
// balance
for u, userTokens := range g.UserTokensBalanceData {
for uniqueLowerTicker, balance := range userTokens {
tokenUsers, ok := g.TokenUsersBalanceData[uniqueLowerTicker]
if !ok {
tokenUsers = make(map[string]*model.BRC20TokenBalance, 0)
g.TokenUsersBalanceData[uniqueLowerTicker] = tokenUsers
}
if balance.OverallBalance().Sign() > 0 {
tokenUsers[u] = balance
}
}
}
g.InscriptionsValidBRC20DataMap = store.InscriptionsValidBRC20DataMap
// inner valid transfer
g.InscriptionsValidTransferMap = store.InscriptionsValidTransferMap
// inner invalid transfer
g.InscriptionsInvalidTransferMap = store.InscriptionsInvalidTransferMap
// module
// all modules info
// module of users [address]moduleid
g.UsersModuleWithTokenMap = store.UsersModuleWithTokenMap
// module lp of users [address]moduleid
g.UsersModuleWithLpTokenMap = store.UsersModuleWithLpTokenMap
// runtime for approve
g.InscriptionsValidApproveMap = store.InscriptionsValidApproveMap
g.InscriptionsInvalidApproveMap = store.InscriptionsInvalidApproveMap
// runtime for conditional approve
g.InscriptionsValidConditionalApproveMap = store.InscriptionsValidConditionalApproveMap
g.InscriptionsInvalidConditionalApproveMap = store.InscriptionsInvalidConditionalApproveMap
// runtime for commit
g.InscriptionsValidCommitMap = store.InscriptionsValidCommitMap
g.InscriptionsInvalidCommitMap = store.InscriptionsInvalidCommitMap
// InscriptionsValidCommitMapById
for _, v := range g.InscriptionsValidCommitMap {
g.InscriptionsValidCommitMapById[v.GetInscriptionId()] = v
}
for module, infoStore := range store.ModulesInfoMap {
info := &model.BRC20ModuleSwapInfo{
ID: infoStore.ID,
Name: infoStore.Name,
DeployerPkScript: infoStore.DeployerPkScript,
SequencerPkScript: infoStore.SequencerPkScript,
GasToPkScript: infoStore.GasToPkScript,
LpFeePkScript: infoStore.LpFeePkScript,
FeeRateSwap: infoStore.FeeRateSwap,
GasTick: infoStore.GasTick,
History: infoStore.History,
// runtime for commit
CommitInvalidMap: infoStore.CommitInvalidMap,
CommitIdMap: infoStore.CommitIdMap,
CommitIdChainMap: infoStore.CommitIdChainMap,
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap: infoStore.UsersTokenBalanceDataMap,
TokenUsersBalanceDataMap: make(map[string]map[string]*model.BRC20ModuleTokenBalance, 0),
// swap
// lp token balance of address in module [pool][address]balance
LPTokenUsersBalanceMap: infoStore.LPTokenUsersBalanceMap,
UsersLPTokenBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// swap total balance
// total balance of pool in module [pool]balanceData
SwapPoolTotalBalanceDataMap: infoStore.SwapPoolTotalBalanceDataMap,
// module deposit/withdraw state [tick]balanceData
ConditionalApproveStateBalanceDataMap: infoStore.ConditionalApproveStateBalanceDataMap,
}
// tick/user: balance
for address, dataMap := range info.UsersTokenBalanceDataMap {
for uniqueLowerTicker, tokenBalance := range dataMap {
tokenUsers, ok := info.TokenUsersBalanceDataMap[uniqueLowerTicker]
if !ok {
tokenUsers = make(map[string]*model.BRC20ModuleTokenBalance, 0)
info.TokenUsersBalanceDataMap[uniqueLowerTicker] = tokenUsers
}
tokenUsers[address] = tokenBalance
}
}
// pair/user: lpbalance
for pair, dataMap := range info.LPTokenUsersBalanceMap {
for address, lpBalance := range dataMap {
userTokens, ok := info.UsersLPTokenBalanceMap[address]
if !ok {
userTokens = make(map[string]*decimal.Decimal, 0)
info.UsersLPTokenBalanceMap[address] = userTokens
}
userTokens[pair] = lpBalance
}
}
g.ModulesInfoMap[module] = info
}
}

56
loader/dump.go Normal file
View File

@@ -0,0 +1,56 @@
package loader
import (
"encoding/hex"
"fmt"
"log"
"os"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func DumpBRC20InputData(fname string, brc20Datas chan interface{}, hexBody bool) {
file, err := os.OpenFile(fname, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
log.Fatalf("open block index file failed, %s", err)
return
}
defer file.Close()
for dataIn := range brc20Datas {
data := dataIn.(*model.InscriptionBRC20Data)
var body, address string
if hexBody {
body = hex.EncodeToString(data.ContentBody)
address = hex.EncodeToString([]byte(data.PkScript))
} else {
body = strings.ReplaceAll(string(data.ContentBody), "\n", " ")
address, err = utils.GetAddressFromScript([]byte(data.PkScript), conf.GlobalNetParams)
if err != nil {
address = hex.EncodeToString([]byte(data.PkScript))
}
}
fmt.Fprintf(file, "%t %s %d %d %d %d %s %d %s %s %d %d %d %d\n",
data.IsTransfer,
hex.EncodeToString([]byte(data.TxId)),
data.Idx,
data.Vout,
data.Offset,
data.Satoshi,
address,
data.InscriptionNumber,
body,
hex.EncodeToString([]byte(data.CreateIdxKey)),
data.Height,
data.TxIdx,
data.BlockTime,
data.Sequence,
)
}
}
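// A hedged sketch of one dumped line (values hypothetical; the field order follows the
// Fprintf above): isTransfer txid idx vout offset satoshi address inscriptionNumber
// body createIdxKey height txIdx blockTime sequence
//
//	false 4a5e1e4b... 0 0 0 546 bc1p... 12345 {"p":"brc-20","op":"mint","tick":"ordi","amt":"1000"} 0a1b2c... 800000 3 1700000000 0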

127
loader/json.go Normal file
View File

@@ -0,0 +1,127 @@
package loader
import (
"bufio"
"encoding/hex"
"fmt"
"os"
"strconv"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/model"
)
func LoadBRC20InputJsonData(fname string) ([]*model.InscriptionBRC20Data, error) {
file, err := os.Open(fname)
if err != nil {
return nil, err
}
defer file.Close()
var brc20Datas []*model.InscriptionBRC20Data
scanner := bufio.NewScanner(file)
for scanner.Scan() {
line := scanner.Text()
if len(line) == 0 {
continue
}
if strings.HasPrefix(line, "#") {
continue
}
fields := strings.Split(line, " ")
if len(fields) != 13 {
return nil, fmt.Errorf("invalid data format")
}
var data model.InscriptionBRC20Data
data.IsTransfer, err = strconv.ParseBool(fields[0])
if err != nil {
return nil, err
}
txid, err := hex.DecodeString(fields[1])
if err != nil {
return nil, err
}
data.TxId = string(txid)
idx, err := strconv.ParseUint(fields[2], 10, 32)
if err != nil {
return nil, err
}
data.Idx = uint32(idx)
vout, err := strconv.ParseUint(fields[3], 10, 32)
if err != nil {
return nil, err
}
data.Vout = uint32(vout)
offset, err := strconv.ParseUint(fields[4], 10, 64)
if err != nil {
return nil, err
}
data.Offset = uint64(offset)
satoshi, err := strconv.ParseUint(fields[5], 10, 64)
if err != nil {
return nil, err
}
data.Satoshi = uint64(satoshi)
pkScript, err := hex.DecodeString(fields[6])
if err != nil {
return nil, err
}
data.PkScript = string(pkScript)
inscriptionNumber, err := strconv.ParseInt(fields[7], 10, 64)
if err != nil {
return nil, err
}
data.InscriptionNumber = int64(inscriptionNumber)
data.ContentBody = []byte(fields[8])
// data.ContentBody, err = hex.DecodeString(fields[8])
// if err != nil {
// return nil, err
// }
createIdxKey, err := hex.DecodeString(fields[9])
if err != nil {
return nil, err
}
data.CreateIdxKey = string(createIdxKey)
height, err := strconv.ParseUint(fields[10], 10, 32)
if err != nil {
return nil, err
}
data.Height = uint32(height)
txIdx, err := strconv.ParseUint(fields[11], 10, 32)
if err != nil {
return nil, err
}
data.TxIdx = uint32(txIdx)
blockTime, err := strconv.ParseUint(fields[12], 10, 32)
if err != nil {
return nil, err
}
data.BlockTime = uint32(blockTime)
brc20Datas = append(brc20Datas, &data)
}
if err := scanner.Err(); err != nil {
return nil, err
}
return brc20Datas, nil
}

View File

@@ -10,22 +10,20 @@ import (
"strconv"
"strings"
"github.com/btcsuite/btcd/chaincfg"
"github.com/unisat-wallet/libbrc20-indexer/conf"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/model"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func LoadBRC20InputData(fname string) ([]*model.InscriptionBRC20Data, error) {
var contentMap map[string][]byte = make(map[string][]byte, 0)
func LoadBRC20InputData(fname string, brc20Datas chan interface{}) error {
file, err := os.Open(fname)
if err != nil {
return nil, err
return err
}
defer file.Close()
var brc20Datas []*model.InscriptionBRC20Data
scanner := bufio.NewScanner(file)
max := 128 * 1024 * 1024
buf := make([]byte, max)
@@ -35,122 +33,109 @@ func LoadBRC20InputData(fname string) ([]*model.InscriptionBRC20Data, error) {
line := scanner.Text()
fields := strings.Split(line, " ")
if len(fields) != 14 {
return nil, fmt.Errorf("invalid data format")
if len(fields) != 13 {
return fmt.Errorf("invalid data format")
}
var data model.InscriptionBRC20Data
data.IsTransfer, err = strconv.ParseBool(fields[0])
sequence, err := strconv.ParseUint(fields[0], 10, 16)
if err != nil {
return nil, err
return err
}
data.Sequence = uint16(sequence)
data.IsTransfer = (data.Sequence > 0)
txid, err := hex.DecodeString(fields[1])
if err != nil {
return nil, err
return err
}
data.TxId = string(txid)
idx, err := strconv.ParseUint(fields[2], 10, 32)
if err != nil {
return nil, err
return err
}
data.Idx = uint32(idx)
vout, err := strconv.ParseUint(fields[3], 10, 32)
if err != nil {
return nil, err
return err
}
data.Vout = uint32(vout)
offset, err := strconv.ParseUint(fields[4], 10, 32)
offset, err := strconv.ParseUint(fields[4], 10, 64)
if err != nil {
return nil, err
return err
}
data.Offset = uint32(offset)
data.Offset = uint64(offset)
satoshi, err := strconv.ParseUint(fields[5], 10, 64)
if err != nil {
return nil, err
return err
}
data.Satoshi = uint64(satoshi)
pkScript, err := hex.DecodeString(fields[6])
if err != nil {
return nil, err
return err
}
data.PkScript = string(pkScript)
inscriptionNumber, err := strconv.ParseInt(fields[7], 10, 64)
if err != nil {
return nil, err
return err
}
data.InscriptionNumber = int64(inscriptionNumber)
if content, ok := contentMap[fields[8]]; ok {
data.ContentBody = content
} else {
content, err := hex.DecodeString(fields[8])
if err != nil {
return nil, err
}
data.ContentBody = content
contentMap[fields[8]] = content
content, err := hex.DecodeString(fields[8])
if err != nil {
return err
}
data.ContentBody = content
createIdxKey, err := hex.DecodeString(fields[9])
if err != nil {
return nil, err
return err
}
data.CreateIdxKey = string(createIdxKey)
height, err := strconv.ParseUint(fields[10], 10, 32)
if err != nil {
return nil, err
return err
}
data.Height = uint32(height)
txIdx, err := strconv.ParseUint(fields[11], 10, 32)
if err != nil {
return nil, err
return err
}
data.TxIdx = uint32(txIdx)
blockTime, err := strconv.ParseUint(fields[12], 10, 32)
if err != nil {
return nil, err
return err
}
data.BlockTime = uint32(blockTime)
sequence, err := strconv.ParseUint(fields[13], 10, 16)
if err != nil {
return nil, err
}
data.Sequence = uint16(sequence)
brc20Datas = append(brc20Datas, &data)
brc20Datas <- &data
}
if err := scanner.Err(); err != nil {
return nil, err
return err
}
return brc20Datas, nil
return nil
}
func DumpTickerInfoMap(fname string,
historyData [][]byte,
inscriptionsTickerInfoMap map[string]*model.BRC20TokenInfo,
userTokensBalanceData map[string]map[string]*model.BRC20TokenBalance,
tokenUsersBalanceData map[string]map[string]*model.BRC20TokenBalance,
testnet bool,
) {
netParams := &chaincfg.MainNetParams
if testnet {
netParams = &chaincfg.TestNet3Params
}
file, err := os.OpenFile(fname, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
log.Fatalf("open block index file failed, %s", err)
@@ -169,7 +154,11 @@ func DumpTickerInfoMap(fname string,
for _, ticker := range allTickers {
info := inscriptionsTickerInfoMap[ticker]
nValid := 0
for _, h := range info.History {
for _, hIdx := range info.History {
buf := historyData[hIdx]
h := &model.BRC20History{}
h.Unmarshal(buf)
if h.Valid {
nValid++
}
@@ -184,24 +173,28 @@ func DumpTickerInfoMap(fname string,
)
// history
for _, h := range info.History {
for _, hIdx := range info.History {
buf := historyData[hIdx]
h := &model.BRC20History{}
h.Unmarshal(buf)
if !h.Valid {
continue
}
addressFrom, err := utils.GetAddressFromScript([]byte(h.PkScriptFrom), netParams)
addressFrom, err := utils.GetAddressFromScript([]byte(h.PkScriptFrom), conf.GlobalNetParams)
if err != nil {
addressFrom = hex.EncodeToString([]byte(h.PkScriptFrom))
}
addressTo, err := utils.GetAddressFromScript([]byte(h.PkScriptTo), netParams)
addressTo, err := utils.GetAddressFromScript([]byte(h.PkScriptTo), conf.GlobalNetParams)
if err != nil {
addressTo = hex.EncodeToString([]byte(h.PkScriptTo))
}
fmt.Fprintf(file, "%s %s %s %s %s -> %s\n",
info.Ticker,
utils.GetReversedStringHex(h.TxId),
utils.HashString([]byte(h.TxId)),
constant.BRC20_HISTORY_TYPE_NAMES[h.Type],
h.Amount,
addressFrom,
@@ -210,19 +203,20 @@ func DumpTickerInfoMap(fname string,
}
// holders
var allHolders []string
var allHoldersPkScript []string
for holder := range tokenUsersBalanceData[ticker] {
allHolders = append(allHolders, holder)
allHoldersPkScript = append(allHoldersPkScript, holder)
}
sort.SliceStable(allHolders, func(i, j int) bool {
return allHolders[i] < allHolders[j]
// sort by holder address
sort.SliceStable(allHoldersPkScript, func(i, j int) bool {
return allHoldersPkScript[i] < allHoldersPkScript[j]
})
// holders
for _, holder := range allHolders {
for _, holder := range allHoldersPkScript {
balanceData := tokenUsersBalanceData[ticker][holder]
address, err := utils.GetAddressFromScript([]byte(balanceData.PkScript), netParams)
address, err := utils.GetAddressFromScript([]byte(balanceData.PkScript), conf.GlobalNetParams)
if err != nil {
address = hex.EncodeToString([]byte(balanceData.PkScript))
}
@@ -231,8 +225,203 @@ func DumpTickerInfoMap(fname string,
address,
len(balanceData.History),
len(balanceData.ValidTransferMap),
balanceData.OverallBalance.String(),
len(userTokensBalanceData[string(balanceData.PkScript)]),
balanceData.OverallBalance().String(),
len(userTokensBalanceData[holder]),
)
}
}
}
func DumpModuleInfoMap(fname string,
modulesInfoMap map[string]*model.BRC20ModuleSwapInfo,
) {
file, err := os.OpenFile(fname, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
log.Fatalf("open module dump file failed, %s", err)
return
}
defer file.Close()
var allModules []string
for moduleId := range modulesInfoMap {
allModules = append(allModules, moduleId)
}
sort.SliceStable(allModules, func(i, j int) bool {
return allModules[i] < allModules[j]
})
for _, moduleId := range allModules {
info := modulesInfoMap[moduleId]
nValid := 0
for _, h := range info.History {
if h.Valid {
nValid++
}
}
fmt.Fprintf(file, "module %s(%s) nHistory: %d, nValidHistory: %d, nCommit: %d, nTickers: %d, nHolders: %d, swap: %d, lpholders: %d\n",
info.Name,
info.ID,
len(info.History),
nValid,
len(info.CommitIdChainMap),
len(info.TokenUsersBalanceDataMap),
len(info.UsersTokenBalanceDataMap),
len(info.LPTokenUsersBalanceMap),
len(info.UsersLPTokenBalanceMap),
)
DumpModuleTickInfoMap(file, info.ConditionalApproveStateBalanceDataMap, info.TokenUsersBalanceDataMap, info.UsersTokenBalanceDataMap)
DumpModuleSwapInfoMap(file, info.SwapPoolTotalBalanceDataMap, info.LPTokenUsersBalanceMap, info.UsersLPTokenBalanceMap)
}
}
func DumpModuleTickInfoMap(file *os.File, condStateBalanceDataMap map[string]*model.BRC20ModuleConditionalApproveStateBalance,
inscriptionsTickerInfoMap, userTokensBalanceData map[string]map[string]*model.BRC20ModuleTokenBalance,
) {
var allTickers []string
for ticker := range inscriptionsTickerInfoMap {
allTickers = append(allTickers, ticker)
}
sort.SliceStable(allTickers, func(i, j int) bool {
return allTickers[i] < allTickers[j]
})
for _, ticker := range allTickers {
holdersMap := inscriptionsTickerInfoMap[ticker]
nHistory := 0
nValid := 0
var allHoldersPkScript []string
for holder, data := range holdersMap {
nHistory += len(data.History)
for _, h := range data.History {
if h.Valid {
nValid++
}
}
allHoldersPkScript = append(allHoldersPkScript, holder)
}
sort.SliceStable(allHoldersPkScript, func(i, j int) bool {
return allHoldersPkScript[i] < allHoldersPkScript[j]
})
fmt.Fprintf(file, " %s nHistory: %d, valid: %d, nHolders: %d\n",
ticker,
nHistory,
nValid,
// TokenTotalBalance[tick], // fixme
len(holdersMap),
)
// holders
for _, holder := range allHoldersPkScript {
balanceData := holdersMap[holder]
address, err := utils.GetAddressFromScript([]byte(balanceData.PkScript), conf.GlobalNetParams)
if err != nil {
address = hex.EncodeToString([]byte(balanceData.PkScript))
}
fmt.Fprintf(file, " %s %s nHistory: %d, bnModule: %s, bnAvai: %s, bnSwap: %s, bnCond: %s, nToken: %d",
ticker,
address,
len(balanceData.History),
balanceData.ModuleBalance().String(),
balanceData.AvailableBalance.String(),
balanceData.SwapAccountBalance.String(),
balanceData.CondApproveableBalance.String(),
len(userTokensBalanceData[string(balanceData.PkScript)]),
)
if len(balanceData.ValidApproveMap) > 0 {
fmt.Fprintf(file, ", nApprove: %d", len(balanceData.ValidApproveMap))
}
if len(balanceData.ValidWithdrawMap) > 0 {
fmt.Fprintf(file, ", nWithdraw: %d", len(balanceData.ValidWithdrawMap))
}
fmt.Fprintf(file, "\n")
}
}
fmt.Fprintf(file, "\n")
// condStateBalanceDataMap
for _, ticker := range allTickers {
stateBalance, ok := condStateBalanceDataMap[ticker]
if !ok {
fmt.Fprintf(file, " module deposit/withdraw state: %s - \n", ticker)
continue
}
fmt.Fprintf(file, " module deposit/withdraw state: %s deposit: %s, match: %s, new: %s, cancel: %s, wait: %s\n",
ticker,
stateBalance.BalanceDeposite.String(),
stateBalance.BalanceApprove.String(),
stateBalance.BalanceNewApprove.String(),
stateBalance.BalanceCancelApprove.String(),
stateBalance.BalanceNewApprove.Sub(
stateBalance.BalanceApprove).Sub(
stateBalance.BalanceCancelApprove).String(),
)
}
fmt.Fprintf(file, "\n")
}
func DumpModuleSwapInfoMap(file *os.File,
swapPoolTotalBalanceDataMap map[string]*model.BRC20ModulePoolTotalBalance,
inscriptionsTickerInfoMap, userTokensBalanceData map[string]map[string]*decimal.Decimal) {
var allTickers []string
for ticker := range inscriptionsTickerInfoMap {
allTickers = append(allTickers, ticker)
}
sort.SliceStable(allTickers, func(i, j int) bool {
return allTickers[i] < allTickers[j]
})
for _, ticker := range allTickers {
holdersMap := inscriptionsTickerInfoMap[ticker]
var allHoldersPkScript []string
for holder := range holdersMap {
allHoldersPkScript = append(allHoldersPkScript, holder)
}
sort.SliceStable(allHoldersPkScript, func(i, j int) bool {
return allHoldersPkScript[i] < allHoldersPkScript[j]
})
swap := swapPoolTotalBalanceDataMap[ticker]
fmt.Fprintf(file, " pool: %s nHistory: %d, nLPholders: %d, lp: %s, %s: %s, %s: %s\n",
ticker,
len(swap.History),
len(holdersMap),
swap.LpBalance,
swap.Tick[0],
swap.TickBalance[0],
swap.Tick[1],
swap.TickBalance[1],
)
// holders
for _, holder := range allHoldersPkScript {
balanceData := holdersMap[holder]
address, err := utils.GetAddressFromScript([]byte(holder), conf.GlobalNetParams)
if err != nil {
address = hex.EncodeToString([]byte(holder))
}
fmt.Fprintf(file, " pool: %s %s lp: %s, swaps: %d\n",
ticker,
address,
balanceData.String(),
len(userTokensBalanceData[holder]),
)
}
}
}

View File

@@ -0,0 +1,31 @@
package model
import (
"github.com/unisat-wallet/libbrc20-indexer/decimal"
)
// state of approve for each tick (balance and history)
type BRC20ModuleConditionalApproveStateBalance struct {
Tick string
BalanceDeposite *decimal.Decimal // Total amount deposited directly
BalanceApprove *decimal.Decimal // Total amount of successful withdrawal matches
BalanceNewApprove *decimal.Decimal // Total amount of withdrawals initiated
BalanceCancelApprove *decimal.Decimal // Total amount of withdrawals cancelled
BalanceWaitApprove *decimal.Decimal // Total amount waiting for withdrawal matching
// BalanceNewApprove - BalanceCancelApprove - BalanceApprove == BalanceWaitApprove
}
func (in *BRC20ModuleConditionalApproveStateBalance) DeepCopy() *BRC20ModuleConditionalApproveStateBalance {
tb := &BRC20ModuleConditionalApproveStateBalance{
Tick: in.Tick,
BalanceDeposite: decimal.NewDecimalCopy(in.BalanceDeposite),
BalanceApprove: decimal.NewDecimalCopy(in.BalanceApprove),
BalanceNewApprove: decimal.NewDecimalCopy(in.BalanceNewApprove),
BalanceCancelApprove: decimal.NewDecimalCopy(in.BalanceCancelApprove),
BalanceWaitApprove: decimal.NewDecimalCopy(in.BalanceWaitApprove),
}
return tb
}
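A minimal sketch of how the invariant noted above can be applied; the helper name is hypothetical and not part of this change, and it assumes the model package (where decimal is already imported):

// waitingApprove is an illustrative helper only: it derives the amount still
// waiting for a withdrawal match from the invariant above, using the same
// expression DumpModuleTickInfoMap prints as the "wait" column.
func waitingApprove(sb *BRC20ModuleConditionalApproveStateBalance) *decimal.Decimal {
	return sb.BalanceNewApprove.Sub(sb.BalanceApprove).Sub(sb.BalanceCancelApprove)
}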

472
model/history.go Normal file
View File

@@ -0,0 +1,472 @@
package model
import (
"encoding/binary"
scriptDecoder "github.com/unisat-wallet/libbrc20-indexer/utils/script"
)
type BRC20HistoryBase struct {
Type uint8 // inscribe-deploy/inscribe-mint/inscribe-transfer/transfer/send/receive
Valid bool
TxId string
Idx uint32
Vout uint32
Offset uint64
PkScriptFrom string
PkScriptTo string
Satoshi uint64
Fee int64
Height uint32
TxIdx uint32
BlockTime uint32
}
// history
type BRC20History struct {
BRC20HistoryBase
Inscription InscriptionBRC20TickInfoResp
// param
Amount string
// state
OverallBalance string
TransferableBalance string
AvailableBalance string
}
func NewBRC20History(historyType uint8, isValid bool, isTransfer bool,
from *InscriptionBRC20TickInfo, bal *BRC20TokenBalance, to *InscriptionBRC20Data) *BRC20History {
history := &BRC20History{
BRC20HistoryBase: BRC20HistoryBase{
Type: historyType,
Valid: isValid,
Height: to.Height,
TxIdx: to.TxIdx,
BlockTime: to.BlockTime,
Fee: to.Fee,
},
Inscription: InscriptionBRC20TickInfoResp{
Height: from.Height,
Data: from.Data,
InscriptionNumber: from.InscriptionNumber,
InscriptionId: from.GetInscriptionId(),
Satoshi: from.Satoshi,
},
Amount: from.Amount.String(),
}
if isTransfer {
history.TxId = to.TxId
history.Vout = to.Vout
history.Offset = to.Offset
history.Idx = to.Idx
history.PkScriptFrom = from.PkScript
history.PkScriptTo = to.PkScript
history.Satoshi = to.Satoshi
if history.Satoshi == 0 {
history.PkScriptTo = history.PkScriptFrom
}
} else {
history.TxId = from.TxId
history.Vout = from.Vout
history.Offset = from.Offset
history.Idx = from.Idx
history.PkScriptTo = from.PkScript
history.Satoshi = from.Satoshi
}
if bal != nil {
history.OverallBalance = bal.AvailableBalance.Add(bal.TransferableBalance).String()
history.TransferableBalance = bal.TransferableBalance.String()
history.AvailableBalance = bal.AvailableBalance.String()
}
return history
}
func (h *BRC20History) Marshal() (result []byte) {
var buf [1024]byte
// type
buf[0] = h.Type
// valid
if h.Valid {
buf[1] = 1
} else {
buf[1] = 0
}
// txid
copy(buf[2:2+32], h.TxId[:])
offset := 34
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Idx))
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Vout))
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Offset))
offset += scriptDecoder.PutCompressedScript(buf[offset:], []byte(h.PkScriptFrom))
offset += scriptDecoder.PutCompressedScript(buf[offset:], []byte(h.PkScriptTo))
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Satoshi))
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Fee))
binary.LittleEndian.PutUint32(buf[offset:], h.Height) // 4
offset += 4
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.TxIdx))
binary.LittleEndian.PutUint32(buf[offset:], h.BlockTime) // 4
offset += 4
// Amount
n := len(h.Amount)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], h.Amount[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// OverallBalance
n = len(h.OverallBalance)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], h.OverallBalance[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// TransferableBalance
n = len(h.TransferableBalance)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], h.TransferableBalance[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// AvailableBalance
n = len(h.AvailableBalance)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], h.AvailableBalance[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// Inscription
binary.LittleEndian.PutUint32(buf[offset:], h.Inscription.Height) // 4
offset += 4
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Inscription.InscriptionNumber))
offset += scriptDecoder.PutVLQ(buf[offset:], uint64(h.Inscription.Satoshi))
// inscriptionId
n = len(h.Inscription.InscriptionId)
if n < 70 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], h.Inscription.InscriptionId[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// data
data := h.Inscription.Data
if data == nil {
result = make([]byte, offset)
copy(result, buf[:offset])
return result
}
// BRC20Tick
n = len(data.BRC20Tick)
if n < 16 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20Tick[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// BRC20Max
n = len(data.BRC20Max)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20Max[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// BRC20Limit
n = len(data.BRC20Limit)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20Limit[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// BRC20Amount
n = len(data.BRC20Amount)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20Amount[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// BRC20Decimal
n = len(data.BRC20Decimal)
if n < 8 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20Decimal[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// BRC20Minted
n = len(data.BRC20Minted)
if n < 40 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20Minted[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
// BRC20SelfMint
n = len(data.BRC20SelfMint)
if n < 8 {
buf[offset] = uint8(n)
offset += 1
copy(buf[offset:offset+n], data.BRC20SelfMint[:])
offset += n
} else {
buf[offset] = 0
offset += 1
}
result = make([]byte, offset)
copy(result, buf[:offset])
return result
}
func (h *BRC20History) Unmarshal(buf []byte) {
h.Type = buf[0]
h.Valid = (buf[1] == 1)
h.TxId = string(buf[2 : 2+32])
offset := 34
idx, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Idx = uint32(idx)
offset += bytesRead
vout, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Vout = uint32(vout)
offset += bytesRead
nftOffset, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Offset = nftOffset
offset += bytesRead
// Decode the compressed script size and ensure there are enough bytes
// left in the slice for it.
scriptSize := scriptDecoder.DecodeCompressedScriptSize(buf[offset:])
if len(buf[offset:]) < scriptSize {
return
}
h.PkScriptFrom = string(scriptDecoder.DecompressScript(buf[offset : offset+scriptSize]))
offset += scriptSize
scriptSize = scriptDecoder.DecodeCompressedScriptSize(buf[offset:])
if len(buf[offset:]) < scriptSize {
return
}
h.PkScriptTo = string(scriptDecoder.DecompressScript(buf[offset : offset+scriptSize]))
offset += scriptSize
satoshi, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Satoshi = satoshi
offset += bytesRead
fee, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Fee = int64(fee)
offset += bytesRead
h.Height = binary.LittleEndian.Uint32(buf[offset:]) // 4
offset += 4
txidx, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.TxIdx = uint32(txidx)
offset += bytesRead
h.BlockTime = binary.LittleEndian.Uint32(buf[offset:]) // 4
offset += 4
// Amount
n := int(buf[offset])
offset += 1
if n > 0 {
h.Amount = string(buf[offset : offset+n])
offset += n
}
// OverallBalance
n = int(buf[offset])
offset += 1
if n > 0 {
h.OverallBalance = string(buf[offset : offset+n])
offset += n
}
// TransferableBalance
n = int(buf[offset])
offset += 1
if n > 0 {
h.TransferableBalance = string(buf[offset : offset+n])
offset += n
}
// AvailableBalance
n = int(buf[offset])
offset += 1
if n > 0 {
h.AvailableBalance = string(buf[offset : offset+n])
offset += n
}
// Inscription
h.Inscription.Height = binary.LittleEndian.Uint32(buf[offset:]) // 4
offset += 4
number, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Inscription.InscriptionNumber = int64(number)
offset += bytesRead
nftSatoshi, bytesRead := scriptDecoder.DeserializeVLQ(buf[offset:])
if bytesRead >= len(buf[offset:]) {
return
}
h.Inscription.Satoshi = nftSatoshi
offset += bytesRead
// inscriptionId
n = int(buf[offset])
offset += 1
if n > 0 {
h.Inscription.InscriptionId = string(buf[offset : offset+n])
offset += n
}
// data
if len(buf[offset:]) == 0 {
return
}
data := &InscriptionBRC20InfoResp{}
h.Inscription.Data = data
// BRC20Tick
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20Tick = string(buf[offset : offset+n])
offset += n
}
// BRC20Max
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20Max = string(buf[offset : offset+n])
offset += n
}
// BRC20Limit
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20Limit = string(buf[offset : offset+n])
offset += n
}
// BRC20Amount
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20Amount = string(buf[offset : offset+n])
offset += n
}
// BRC20Decimal
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20Decimal = string(buf[offset : offset+n])
offset += n
}
// BRC20Minted
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20Minted = string(buf[offset : offset+n])
offset += n
}
// BRC20SelfMint
n = int(buf[offset])
offset += 1
if n > 0 {
data.BRC20SelfMint = string(buf[offset : offset+n])
offset += n
}
}
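Histories are stored as these compact byte records and referenced elsewhere by uint32 index (see the historyData lookup in DumpTickerInfoMap above). A minimal sketch of that pattern, assuming it sits alongside this code; historyData, appendHistory and loadHistory are illustrative names, not identifiers from this change:

// Illustrative only: append a marshaled history record to a shared slice and
// resolve it again later from its index.
var historyData [][]byte

func appendHistory(h *BRC20History) uint32 {
	idx := uint32(len(historyData))
	historyData = append(historyData, h.Marshal())
	return idx
}

func loadHistory(idx uint32) *BRC20History {
	h := &BRC20History{}
	h.Unmarshal(historyData[idx])
	return h
}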

66
model/history_module.go Normal file
View File

@@ -0,0 +1,66 @@
package model
// history
type BRC20ModuleHistory struct {
BRC20HistoryBase
Inscription InscriptionBRC20SwapInfoResp
Data any
// no state
}
func NewBRC20ModuleHistory(isTransfer bool, historyType uint8, from, to *InscriptionBRC20Data, data any, isValid bool) *BRC20ModuleHistory {
history := &BRC20ModuleHistory{
BRC20HistoryBase: BRC20HistoryBase{
Type: historyType,
Valid: isValid,
},
Inscription: InscriptionBRC20SwapInfoResp{
Height: from.Height,
ContentBody: from.ContentBody, // to.Content is empty on transfer
InscriptionNumber: from.InscriptionNumber,
InscriptionId: from.GetInscriptionId(),
},
}
if isTransfer {
history.TxId = to.TxId
history.Vout = to.Vout
history.Offset = to.Offset
history.Idx = to.Idx
history.PkScriptFrom = from.PkScript
history.PkScriptTo = to.PkScript
history.Satoshi = to.Satoshi
history.Height = to.Height
history.TxIdx = to.TxIdx
history.BlockTime = to.BlockTime
} else {
history.TxId = from.TxId
history.Vout = from.Vout
history.Offset = from.Offset
history.Idx = from.Idx
history.PkScriptTo = from.PkScript
history.Satoshi = from.Satoshi
history.Height = from.Height
history.TxIdx = from.TxIdx
history.BlockTime = from.BlockTime
}
history.Data = data
return history
}
// history
type BRC20SwapHistoryApproveData struct {
Tick string `json:"tick"`
Amount string `json:"amount"` // current amt
}
// history
type BRC20SwapHistoryCondApproveData struct {
Tick string `json:"tick"`
Amount string `json:"amount"` // current amt
Balance string `json:"balance"` // current balance
TransferInscriptionId string `json:"transfer"` // transfer inscription id
TransferMax string `json:"transferMax"` // original amount of the transfer inscription
}

View File

@@ -1,129 +0,0 @@
package model
import (
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
)
type InscriptionBRC20TickInfo struct {
BRC20Tick string `json:"-"`
Operation uint8 `json:"-"`
Decimal uint8 `json:"-"`
Amount *decimal.Decimal `json:"-"`
TxId string `json:"-"`
Idx uint32 `json:"-"`
Vout uint32 `json:"-"`
Satoshi uint64 `json:"-"`
PkScript string `json:"-"`
InscriptionNumber int64 `json:"inscriptionNumber"`
CreateIdxKey string `json:"-"`
Height uint32 `json:"-"`
TxIdx uint32 `json:"-"`
BlockTime uint32 `json:"-"`
}
// for deploy
type InscriptionBRC20TickDeployInfo struct {
InscriptionBRC20TickInfo
Max *decimal.Decimal `json:"-"`
Limit *decimal.Decimal `json:"-"`
TotalMinted *decimal.Decimal `json:"-"`
ConfirmedMinted *decimal.Decimal `json:"-"`
ConfirmedMinted1h *decimal.Decimal `json:"-"`
ConfirmedMinted24h *decimal.Decimal `json:"-"`
MintTimes uint32 `json:"-"`
CompleteHeight uint32 `json:"-"`
CompleteBlockTime uint32 `json:"-"`
InscriptionNumberStart int64 `json:"-"`
InscriptionNumberEnd int64 `json:"-"`
}
func NewInscriptionBRC20TickDeployInfo(body *InscriptionBRC20Content, data *InscriptionBRC20Data) *InscriptionBRC20TickDeployInfo {
info := &InscriptionBRC20TickDeployInfo{
InscriptionBRC20TickInfo: InscriptionBRC20TickInfo{
BRC20Tick: body.BRC20Tick,
Operation: constant.BRC20_OP_N_DEPLOY,
Decimal: 18,
TxId: data.TxId,
Idx: data.Idx,
Vout: data.Vout,
Satoshi: data.Satoshi,
PkScript: data.PkScript,
InscriptionNumber: data.InscriptionNumber,
CreateIdxKey: data.CreateIdxKey,
Height: data.Height,
TxIdx: data.TxIdx,
BlockTime: data.BlockTime,
},
}
return info
}
// for mint
type InscriptionBRC20TickMintInfo struct {
InscriptionBRC20TickInfo
}
func NewInscriptionBRC20TickMintInfo(body *InscriptionBRC20Content, data *InscriptionBRC20Data) *InscriptionBRC20TickMintInfo {
info := &InscriptionBRC20TickMintInfo{
InscriptionBRC20TickInfo: InscriptionBRC20TickInfo{
BRC20Tick: body.BRC20Tick,
Operation: constant.BRC20_OP_N_MINT,
Decimal: 18,
TxId: data.TxId,
Idx: data.Idx,
Vout: data.Vout,
Satoshi: data.Satoshi,
PkScript: data.PkScript,
InscriptionNumber: data.InscriptionNumber,
CreateIdxKey: data.CreateIdxKey,
Height: data.Height,
TxIdx: data.TxIdx,
BlockTime: data.BlockTime,
},
}
return info
}
// for transfer
type InscriptionBRC20TickTransferInfo struct {
InscriptionBRC20TickInfo
}
func NewInscriptionBRC20TickTransferInfo(body *InscriptionBRC20Content, data *InscriptionBRC20Data) *InscriptionBRC20TickTransferInfo {
info := &InscriptionBRC20TickTransferInfo{
InscriptionBRC20TickInfo: InscriptionBRC20TickInfo{
BRC20Tick: body.BRC20Tick,
Operation: constant.BRC20_OP_N_TRANSFER,
Decimal: 18,
TxId: data.TxId,
Idx: data.Idx,
Vout: data.Vout,
Satoshi: data.Satoshi,
PkScript: data.PkScript,
InscriptionNumber: data.InscriptionNumber,
CreateIdxKey: data.CreateIdxKey,
Height: data.Height,
TxIdx: data.TxIdx,
BlockTime: data.BlockTime,
},
}
return info
}

View File

@@ -1,44 +1,100 @@
package model
import (
"encoding/binary"
"encoding/json"
"fmt"
"github.com/unisat-wallet/libbrc20-indexer/constant"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
// nft create point, recorded when the NFT is created
type NFTCreateIdxKey struct {
Height uint32 // height of the block in which the NFT was created
IdxInBlock uint64 // index of the NFT within that block
}
func (p *NFTCreateIdxKey) String() string {
var key [12]byte
binary.LittleEndian.PutUint32(key[0:4], p.Height)
binary.LittleEndian.PutUint64(key[4:12], p.IdxInBlock)
return string(key[:])
}
// event raw data
type InscriptionBRC20Data struct {
IsTransfer bool
TxId string `json:"-"`
Idx uint32 `json:"-"`
Vout uint32 `json:"-"`
Offset uint32 `json:"-"`
Offset uint64 `json:"-"`
Satoshi uint64 `json:"-"`
PkScript string `json:"-"`
Fee int64 `json:"-"`
InscriptionNumber int64
Parent []byte
ContentBody []byte
CreateIdxKey string
Height uint32 // Height of NFT show in block onCreate
TxIdx uint32
BlockTime uint32
Sequence uint16
Height uint32 // Height of NFT show in block onCreate
TxIdx uint32
BlockTime uint32
Sequence uint16
// for cache
InscriptionId string
}
type InscriptionBRC20Content struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
BRC20Tick string `json:"tick,omitempty"`
BRC20Max string `json:"max,omitempty"`
BRC20Amount string `json:"amt,omitempty"`
BRC20Limit string `json:"lim,omitempty"` // option
BRC20Decimal string `json:"dec,omitempty"` // option
func (data *InscriptionBRC20Data) GetInscriptionId() string {
if data.InscriptionId == "" {
data.InscriptionId = fmt.Sprintf("%si%d", utils.HashString([]byte(data.TxId)), data.Idx)
}
return data.InscriptionId
}
func (body *InscriptionBRC20Content) Unmarshal(contentBody []byte) (err error) {
type InscriptionBRC20InfoResp struct {
Operation string `json:"op,omitempty"`
BRC20Tick string `json:"tick,omitempty"`
BRC20Max string `json:"max,omitempty"`
BRC20Limit string `json:"lim,omitempty"`
BRC20Amount string `json:"amt,omitempty"`
BRC20Decimal string `json:"decimal,omitempty"`
BRC20Minted string `json:"minted,omitempty"`
BRC20SelfMint string `json:"self_mint,omitempty"`
}
// decode protocol
type InscriptionBRC20ProtocalContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
}
func (body *InscriptionBRC20ProtocalContent) Unmarshal(contentBody []byte) (err error) {
var bodyMap map[string]interface{} = make(map[string]interface{}, 8)
if err := json.Unmarshal(contentBody, &bodyMap); err != nil {
return err
}
if v, ok := bodyMap["p"].(string); ok {
body.Proto = v
}
if v, ok := bodyMap["op"].(string); ok {
body.Operation = v
}
return nil
}
// decode mint/transfer
type InscriptionBRC20MintTransferContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
BRC20Tick string `json:"tick,omitempty"`
BRC20Amount string `json:"amt,omitempty"`
}
func (body *InscriptionBRC20MintTransferContent) Unmarshal(contentBody []byte) (err error) {
var bodyMap map[string]interface{} = make(map[string]interface{}, 8)
if err := json.Unmarshal(contentBody, &bodyMap); err != nil {
return err
@@ -52,13 +108,46 @@ func (body *InscriptionBRC20Content) Unmarshal(contentBody []byte) (err error) {
if v, ok := bodyMap["tick"].(string); ok {
body.BRC20Tick = v
}
if v, ok := bodyMap["max"].(string); ok {
body.BRC20Max = v
}
if v, ok := bodyMap["amt"].(string); ok {
body.BRC20Amount = v
}
return nil
}
// decode deploy data
type InscriptionBRC20DeployContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
BRC20Tick string `json:"tick,omitempty"`
BRC20Max string `json:"max,omitempty"`
BRC20Limit string `json:"lim,omitempty"`
BRC20Decimal string `json:"dec,omitempty"`
BRC20SelfMint string `json:"self_mint,omitempty"`
}
func (body *InscriptionBRC20DeployContent) Unmarshal(contentBody []byte) (err error) {
var bodyMap map[string]interface{} = make(map[string]interface{}, 8)
if err := json.Unmarshal(contentBody, &bodyMap); err != nil {
return err
}
if v, ok := bodyMap["p"].(string); ok {
body.Proto = v
}
if v, ok := bodyMap["op"].(string); ok {
body.Operation = v
}
if v, ok := bodyMap["tick"].(string); ok {
body.BRC20Tick = v
}
if _, ok := bodyMap["self_mint"]; ok { // has self_mint
body.BRC20SelfMint = "false"
}
if v, ok := bodyMap["self_mint"].(string); ok { // self_mint is string
body.BRC20SelfMint = v
}
if v, ok := bodyMap["max"].(string); ok {
body.BRC20Max = v
}
if _, ok := bodyMap["lim"]; !ok {
body.BRC20Limit = body.BRC20Max
} else {
@@ -68,7 +157,7 @@ func (body *InscriptionBRC20Content) Unmarshal(contentBody []byte) (err error) {
}
if _, ok := bodyMap["dec"]; !ok {
body.BRC20Decimal = constant.DEFAULT_DECIMAL_18
body.BRC20Decimal = decimal.MAX_PRECISION_STRING
} else {
if v, ok := bodyMap["dec"].(string); ok {
body.BRC20Decimal = v
@@ -78,99 +167,200 @@ func (body *InscriptionBRC20Content) Unmarshal(contentBody []byte) (err error) {
return nil
}
// all ticker (state and history)
type BRC20TokenInfo struct {
Ticker string
Deploy *InscriptionBRC20TickDeployInfo
Deploy *InscriptionBRC20TickInfo
History []*BRC20History
HistoryMint []*BRC20History
HistoryInscribeTransfer []*BRC20History
HistoryTransfer []*BRC20History
History []uint32
HistoryMint []uint32
HistoryInscribeTransfer []uint32
HistoryTransfer []uint32
}
type InscriptionBRC20TickInfoResp struct {
Height uint32 `json:"-"`
type InscriptionBRC20TransferInfo struct {
Tick string
Amount *decimal.Decimal
Data *InscriptionBRC20Data
}
// inscription info, with mint state
type InscriptionBRC20TickInfo struct {
Data *InscriptionBRC20InfoResp `json:"data"`
Tick string
Amount *decimal.Decimal `json:"-"`
Meta *InscriptionBRC20Data
SelfMint bool `json:"-"`
Max *decimal.Decimal `json:"-"`
Limit *decimal.Decimal `json:"-"`
TotalMinted *decimal.Decimal `json:"-"`
ConfirmedMinted *decimal.Decimal `json:"-"`
ConfirmedMinted1h *decimal.Decimal `json:"-"`
ConfirmedMinted24h *decimal.Decimal `json:"-"`
Burned *decimal.Decimal `json:"-"`
MintTimes uint32 `json:"-"`
Decimal uint8 `json:"-"`
TxId string `json:"-"`
Idx uint32 `json:"-"`
Vout uint32 `json:"-"`
Offset uint64 `json:"-"`
Satoshi uint64 `json:"-"`
PkScript string `json:"-"`
InscriptionNumber int64 `json:"inscriptionNumber"`
InscriptionId string `json:"inscriptionId"`
Confirmations int `json:"confirmations"`
CreateIdxKey string `json:"-"`
Height uint32 `json:"-"`
TxIdx uint32 `json:"-"`
BlockTime uint32 `json:"-"`
CompleteHeight uint32 `json:"-"`
CompleteBlockTime uint32 `json:"-"`
InscriptionNumberStart int64 `json:"-"`
InscriptionNumberEnd int64 `json:"-"`
}
type BRC20TokenBalance struct {
Ticker string
PkScript string
OverallBalanceSafe *decimal.Decimal
OverallBalance *decimal.Decimal
TransferableBalance *decimal.Decimal
InvalidTransferList []*InscriptionBRC20TickTransferInfo
ValidTransferMap map[string]*InscriptionBRC20TickTransferInfo
History []*BRC20History
HistoryMint []*BRC20History
HistoryInscribeTransfer []*BRC20History
HistorySend []*BRC20History
HistoryReceive []*BRC20History
}
type BRC20History struct {
Type uint8 // inscribe-deploy/inscribe-mint/inscribe-transfer/transfer/send/receive
Valid bool
Inscription InscriptionBRC20TickInfoResp
TxId string
Idx uint32
Vout uint32
PkScriptFrom string
PkScriptTo string
Satoshi uint64
Amount string
OverallBalance string
TransferableBalance string
AvailableBalance string
Height uint32
TxIdx uint32
BlockTime uint32
func (d *InscriptionBRC20TickInfo) GetInscriptionId() string {
return fmt.Sprintf("%si%d", utils.HashString([]byte(d.TxId)), d.Idx)
}
func NewBRC20History(historyType uint8, isValid bool, isTransfer bool,
info *InscriptionBRC20TickInfo, bal *BRC20TokenBalance, data *InscriptionBRC20Data) *BRC20History {
history := &BRC20History{
Type: historyType,
Valid: isValid,
Inscription: InscriptionBRC20TickInfoResp{
Height: data.Height,
InscriptionNumber: info.InscriptionNumber,
InscriptionId: fmt.Sprintf("%si%d", utils.GetReversedStringHex(info.TxId), info.Idx),
func (in *InscriptionBRC20TickInfo) DeepCopy() (copy *InscriptionBRC20TickInfo) {
copy = &InscriptionBRC20TickInfo{
Tick: in.Tick,
SelfMint: in.SelfMint,
Data: in.Data,
Decimal: in.Decimal,
TxId: in.TxId,
Idx: in.Idx,
Vout: in.Vout,
Offset: in.Offset,
Satoshi: in.Satoshi,
PkScript: in.PkScript,
InscriptionNumber: in.InscriptionNumber,
CreateIdxKey: in.CreateIdxKey,
Height: in.Height,
TxIdx: in.TxIdx,
BlockTime: in.BlockTime,
// runtime value
Max: decimal.NewDecimalCopy(in.Max),
Limit: decimal.NewDecimalCopy(in.Limit),
TotalMinted: decimal.NewDecimalCopy(in.TotalMinted),
ConfirmedMinted: decimal.NewDecimalCopy(in.ConfirmedMinted),
ConfirmedMinted1h: decimal.NewDecimalCopy(in.ConfirmedMinted1h),
ConfirmedMinted24h: decimal.NewDecimalCopy(in.ConfirmedMinted24h),
Burned: decimal.NewDecimalCopy(in.Burned),
Amount: decimal.NewDecimalCopy(in.Amount),
MintTimes: in.MintTimes,
CompleteHeight: in.CompleteHeight,
CompleteBlockTime: in.CompleteBlockTime,
InscriptionNumberStart: in.InscriptionNumberStart,
InscriptionNumberEnd: in.InscriptionNumberEnd,
}
return copy
}
func NewInscriptionBRC20TickInfo(tick, operation string, data *InscriptionBRC20Data) *InscriptionBRC20TickInfo {
info := &InscriptionBRC20TickInfo{
Tick: tick,
Data: &InscriptionBRC20InfoResp{
BRC20Tick: tick,
Operation: operation,
},
Amount: info.Amount.String(),
Height: data.Height,
TxIdx: data.TxIdx,
BlockTime: data.BlockTime,
}
if isTransfer {
history.TxId = data.TxId
history.Vout = data.Vout
history.Idx = data.Idx
history.PkScriptFrom = info.PkScript
history.PkScriptTo = data.PkScript
history.Satoshi = data.Satoshi
if history.Satoshi == 0 {
history.PkScriptTo = history.PkScriptFrom
}
Decimal: 18,
} else {
history.TxId = info.TxId
history.Vout = info.Vout
history.Idx = info.Idx
history.PkScriptTo = info.PkScript
history.Satoshi = info.Satoshi
}
TxId: data.TxId,
Idx: data.Idx,
Vout: data.Vout,
Offset: data.Offset,
if bal != nil {
history.OverallBalance = bal.OverallBalance.String()
history.TransferableBalance = bal.TransferableBalance.String()
history.AvailableBalance = bal.OverallBalance.Sub(bal.TransferableBalance).String()
Satoshi: data.Satoshi,
PkScript: data.PkScript,
InscriptionNumber: data.InscriptionNumber,
CreateIdxKey: data.CreateIdxKey,
Height: data.Height,
TxIdx: data.TxIdx,
BlockTime: data.BlockTime,
}
return history
return info
}
// all history for user
type BRC20UserHistory struct {
History []uint32
}
// state of address for each tick (balance and history)
type BRC20TokenBalance struct {
Ticker string
PkScript string
AvailableBalance *decimal.Decimal
AvailableBalanceSafe *decimal.Decimal
TransferableBalance *decimal.Decimal
ValidTransferMap map[string]*InscriptionBRC20TickInfo
History []uint32
HistoryMint []uint32
HistoryInscribeTransfer []uint32
HistorySend []uint32
HistoryReceive []uint32
}
func (bal *BRC20TokenBalance) OverallBalance() *decimal.Decimal {
return bal.AvailableBalance.Add(bal.TransferableBalance)
}
func (in *BRC20TokenBalance) DeepCopy() (tb *BRC20TokenBalance) {
tb = &BRC20TokenBalance{
Ticker: in.Ticker,
PkScript: in.PkScript,
AvailableBalanceSafe: decimal.NewDecimalCopy(in.AvailableBalanceSafe),
AvailableBalance: decimal.NewDecimalCopy(in.AvailableBalance),
TransferableBalance: decimal.NewDecimalCopy(in.TransferableBalance),
}
tb.ValidTransferMap = make(map[string]*InscriptionBRC20TickInfo, len(in.ValidTransferMap))
for k, v := range in.ValidTransferMap {
tb.ValidTransferMap[k] = v.DeepCopy()
}
tb.History = make([]uint32, len(in.History))
copy(tb.History, in.History)
tb.HistoryMint = make([]uint32, len(in.HistoryMint))
copy(tb.HistoryMint, in.HistoryMint)
tb.HistoryInscribeTransfer = make([]uint32, len(in.HistoryInscribeTransfer))
copy(tb.HistoryInscribeTransfer, in.HistoryInscribeTransfer)
tb.HistorySend = make([]uint32, len(in.HistorySend))
copy(tb.HistorySend, in.HistorySend)
tb.HistoryReceive = make([]uint32, len(in.HistoryReceive))
copy(tb.HistoryReceive, in.HistoryReceive)
return tb
}
// history inscription info
type InscriptionBRC20TickInfoResp struct {
Height uint32 `json:"-"`
Data *InscriptionBRC20InfoResp `json:"data"`
InscriptionNumber int64 `json:"inscriptionNumber"`
InscriptionId string `json:"inscriptionId"`
Satoshi uint64 `json:"satoshi"`
Confirmations int `json:"confirmations"`
}

18
model/module.go Normal file
View File

@@ -0,0 +1,18 @@
package model
// decode data
type InscriptionBRC20ModuleDeployContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
BRC20Name string `json:"name,omitempty"`
BRC20Source string `json:"source,omitempty"`
BRC20Init map[string]interface{} `json:"init,omitempty"`
}
type InscriptionBRC20ModuleWithdrawContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
Module string `json:"module,omitempty"`
Tick string `json:"tick,omitempty"`
Amount string `json:"amt,omitempty"`
}

38
model/store.go Normal file
View File

@@ -0,0 +1,38 @@
package model
import "github.com/unisat-wallet/libbrc20-indexer/decimal"
// module state store
type BRC20ModuleSwapInfoStore struct {
ID string // module id
Name string // module name
DeployerPkScript string // deployer
SequencerPkScript string // operator, sequencer
GasToPkScript string //
LpFeePkScript string //
FeeRateSwap string
GasTick string
History []*BRC20ModuleHistory // history for deploy, deposit, commit, quit
// runtime for commit
CommitInvalidMap map[string]struct{} // All invalid create commits
CommitIdMap map[string]struct{} // All valid create commits
CommitIdChainMap map[string]struct{} // All connected commits; these cannot be reused as parents by subsequent commits.
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap map[string]map[string]*BRC20ModuleTokenBalance
// swap
// lp token balance of address in module [pool][address]balance
LPTokenUsersBalanceMap map[string]map[string]*decimal.Decimal
// swap total balance
// total balance of pool in module [pool]balanceData
SwapPoolTotalBalanceDataMap map[string]*BRC20ModulePoolTotalBalance
// module deposit/withdraw state [tick]balanceData
ConditionalApproveStateBalanceDataMap map[string]*BRC20ModuleConditionalApproveStateBalance
}

780
model/swap.go Normal file
View File

@@ -0,0 +1,780 @@
package model
import (
"fmt"
"log"
"strings"
"github.com/unisat-wallet/libbrc20-indexer/decimal"
)
// decode data
type InscriptionBRC20ModuleDeploySwapContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
Name string `json:"name,omitempty"`
Source string `json:"source,omitempty"`
Init map[string]string `json:"init,omitempty"`
}
type InscriptionBRC20ModuleSwapApproveContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
Module string `json:"module,omitempty"`
Tick string `json:"tick,omitempty"`
Amount string `json:"amt,omitempty"`
}
type InscriptionBRC20ModuleSwapQuitContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
Module string `json:"module,omitempty"`
}
// check state
type SwapFunctionResultCheckStateForUser struct {
Address string `json:"address,omitempty"`
Tick string `json:"tick,omitempty"`
Balance string `json:"balance,omitempty"`
}
type SwapFunctionResultCheckStateForPool struct {
Pair string `json:"pair,omitempty"`
ReserveAmount0 string `json:"reserve0,omitempty"`
ReserveAmount1 string `json:"reserve1,omitempty"`
LPAmount string `json:"lp,omitempty"`
}
type SwapFunctionResultCheckState struct {
Users []SwapFunctionResultCheckStateForUser `json:"users,omitempty"`
Pools []SwapFunctionResultCheckStateForPool `json:"pools,omitempty"`
CommitId string `json:"commit,omitempty"`
FunctionIdx int `json:"function,omitempty"`
}
// load events
type BRC20ModuleHistoryInfoEvent struct {
Type string `json:"type"` // inscribe-deploy/inscribe-mint/inscribe-transfer/transfer/send/receive
Valid bool `json:"valid"`
TxIdHex string `json:"txid"`
Idx uint32 `json:"idx"` // inscription index
Vout uint32 `json:"vout"`
Offset uint64 `json:"offset"`
InscriptionNumber int64 `json:"inscriptionNumber"`
InscriptionId string `json:"inscriptionId"`
ContentType string `json:"contentType"`
ContentBody string `json:"contentBody"`
AddressFrom string `json:"from"`
AddressTo string `json:"to"`
Satoshi uint64 `json:"satoshi"`
Data *BRC20SwapHistoryCondApproveData `json:"data"`
Height uint32 `json:"height"`
TxIdx uint32 `json:"txidx"` // txidx in block
BlockHashHex string `json:"blockhash"`
BlockTime uint32 `json:"blocktime"`
}
// commit function data
type SwapFunctionData struct {
Address string `json:"addr,omitempty"`
Function string `json:"func,omitempty"`
Params []string `json:"params,omitempty"`
Timestamp uint `json:"ts,omitempty"`
Signature string `json:"sig,omitempty"`
ID string `json:"-"`
PkScript string `json:"-"`
}
type InscriptionBRC20ModuleSwapCommitContent struct {
Proto string `json:"p,omitempty"`
Operation string `json:"op,omitempty"`
Module string `json:"module,omitempty"`
Parent string `json:"parent,omitempty"`
GasPrice string `json:"gas_price,omitempty"`
Data []*SwapFunctionData `json:"data,omitempty"`
}
type TransferStateForConditionalApprove struct {
Tick string
From string
To string
Balance *decimal.Decimal
InscriptionId string
Max string // origin amount
}
func (d *TransferStateForConditionalApprove) DeepCopy() (copy *TransferStateForConditionalApprove) {
copy = &TransferStateForConditionalApprove{
Tick: d.Tick,
From: d.From,
To: d.To,
Balance: decimal.NewDecimalCopy(d.Balance), // maybe no need copy
InscriptionId: d.InscriptionId,
Max: d.Max,
}
return copy
}
type ApproveStateForConditionalApprove struct {
Module string
Tick string
Owner string
Balance *decimal.Decimal
ApproveInfo *InscriptionBRC20SwapConditionalApproveInfo
ToData *InscriptionBRC20Data
}
func (d *ApproveStateForConditionalApprove) DeepCopy() (copy *ApproveStateForConditionalApprove) {
copy = &ApproveStateForConditionalApprove{
Module: d.Module,
Tick: d.Tick,
Owner: d.Owner,
Balance: decimal.NewDecimalCopy(d.Balance), // maybe no need copy
ApproveInfo: d.ApproveInfo.DeepCopy(),
ToData: d.ToData,
}
return copy
}
type ConditionalApproveEvent struct {
Module string
Tick string
TransferInscriptionId string
TransferMax string
From string
To string
Amount *decimal.Decimal
Balance *decimal.Decimal
FromData InscriptionBRC20Data
ToData InscriptionBRC20Data
ApproveInfo *InscriptionBRC20SwapConditionalApproveInfo
}
func NewConditionalApproveEvent(senderPkScript, receiverPkScript string, amount, balance *decimal.Decimal,
data *InscriptionBRC20Data, approveInfo *InscriptionBRC20SwapConditionalApproveInfo,
transferInscriptionId, transferMax string) (event *ConditionalApproveEvent) {
fromData := *approveInfo.Data
fromData.PkScript = senderPkScript
toData := *data
toData.PkScript = receiverPkScript
// refunded directly at the start
return &ConditionalApproveEvent{
Module: approveInfo.Module,
Tick: approveInfo.Tick,
TransferInscriptionId: transferInscriptionId,
TransferMax: transferMax,
From: senderPkScript,
To: receiverPkScript,
Amount: amount,
Balance: balance,
FromData: fromData,
ToData: toData,
ApproveInfo: approveInfo,
}
}
// module state
type BRC20ModuleSwapInfo struct {
ID string // module id
Name string // module name
DeployerPkScript string // deployer
SequencerPkScript string // operator, sequencer
GasToPkScript string //
LpFeePkScript string //
FeeRateSwap string
GasTick string
History []*BRC20ModuleHistory // history for deploy, deposit, commit, quit
// runtime for commit
CommitInvalidMap map[string]struct{} // All invalid create commits
CommitIdMap map[string]struct{} // All valid create commits
CommitIdChainMap map[string]struct{} // All connected commits; these cannot be reused as parents by subsequent commits.
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap map[string]map[string]*BRC20ModuleTokenBalance
// token balance of address in module [tick][address]balanceData
TokenUsersBalanceDataMap map[string]map[string]*BRC20ModuleTokenBalance
// swap
// lp token balance of address in module [pool][address]balance
LPTokenUsersBalanceMap map[string]map[string]*decimal.Decimal
// lp token of users in module [address][pool]balance
UsersLPTokenBalanceMap map[string]map[string]*decimal.Decimal
// swap total balance
// total balance of pool in module [pool]balanceData
SwapPoolTotalBalanceDataMap map[string]*BRC20ModulePoolTotalBalance
// module deposit/withdraw state [tick]balanceData
ConditionalApproveStateBalanceDataMap map[string]*BRC20ModuleConditionalApproveStateBalance
// runtime for approve
ThisTxId string
TransferStatesForConditionalApprove []*TransferStateForConditionalApprove
ApproveStatesForConditionalApprove []*ApproveStateForConditionalApprove
}
func (m *BRC20ModuleSwapInfo) DeepCopy() (copy *BRC20ModuleSwapInfo) {
copy = &BRC20ModuleSwapInfo{
ID: m.ID,
Name: m.Name,
DeployerPkScript: m.DeployerPkScript, // deployer
SequencerPkScript: m.SequencerPkScript, // Sequencer
GasToPkScript: m.GasToPkScript,
LpFeePkScript: m.LpFeePkScript,
FeeRateSwap: m.FeeRateSwap,
GasTick: m.GasTick,
History: make([]*BRC20ModuleHistory, 0),
// runtime for commit
CommitInvalidMap: make(map[string]struct{}, 0),
CommitIdChainMap: make(map[string]struct{}, 0),
CommitIdMap: make(map[string]struct{}, 0),
// runtime for holders
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap: make(map[string]map[string]*BRC20ModuleTokenBalance, 0),
// token balance of address in module [tick][address]balanceData
TokenUsersBalanceDataMap: make(map[string]map[string]*BRC20ModuleTokenBalance, 0),
// swap
// lp token balance of address in module [pair][address]balance
LPTokenUsersBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// lp token of users in module [address][pair]balance
UsersLPTokenBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// swap total balance
// total balance of pool in module [pair]balanceData
SwapPoolTotalBalanceDataMap: make(map[string]*BRC20ModulePoolTotalBalance, 0),
ConditionalApproveStateBalanceDataMap: make(map[string]*BRC20ModuleConditionalApproveStateBalance, 0),
}
for _, h := range m.History {
copy.History = append(copy.History, h)
// fix: more history
}
// invalid commit
for k := range m.CommitInvalidMap {
copy.CommitInvalidMap[k] = struct{}{}
}
for k := range m.CommitIdChainMap {
copy.CommitIdChainMap[k] = struct{}{}
}
for k := range m.CommitIdMap {
copy.CommitIdMap[k] = struct{}{}
}
// user/tick: balance
for address, dataMap := range m.UsersTokenBalanceDataMap {
dataMapCopy := make(map[string]*BRC20ModuleTokenBalance, 0)
for tick, balance := range dataMap {
dataMapCopy[tick] = balance.DeepCopy()
}
copy.UsersTokenBalanceDataMap[address] = dataMapCopy
}
// tick/user: balance
for tick, dataMap := range m.TokenUsersBalanceDataMap {
dataMapCopy := make(map[string]*BRC20ModuleTokenBalance, 0)
for address := range dataMap {
dataMapCopy[address] = copy.UsersTokenBalanceDataMap[address][tick]
}
copy.TokenUsersBalanceDataMap[tick] = dataMapCopy
}
// user/pair: lpbalance
for address, dataMap := range m.UsersLPTokenBalanceMap {
dataMapCopy := make(map[string]*decimal.Decimal, 0)
for pair, balance := range dataMap {
dataMapCopy[pair] = decimal.NewDecimalCopy(balance)
}
copy.UsersLPTokenBalanceMap[address] = dataMapCopy
}
// pair/user: lpbalance
for pair, dataMap := range m.LPTokenUsersBalanceMap {
dataMapCopy := make(map[string]*decimal.Decimal, 0)
for address := range dataMap {
dataMapCopy[address] = copy.UsersLPTokenBalanceMap[address][pair]
}
copy.LPTokenUsersBalanceMap[pair] = dataMapCopy
}
// swap total balance
for pair, balance := range m.SwapPoolTotalBalanceDataMap {
copy.SwapPoolTotalBalanceDataMap[pair] = balance.DeepCopy()
}
// swap deposit/approve state balance
for tick, balance := range m.ConditionalApproveStateBalanceDataMap {
copy.ConditionalApproveStateBalanceDataMap[tick] = balance.DeepCopy()
}
// runtime for approve
copy.ThisTxId = m.ThisTxId
for _, v := range m.TransferStatesForConditionalApprove {
copy.TransferStatesForConditionalApprove = append(copy.TransferStatesForConditionalApprove, v.DeepCopy())
}
for _, v := range m.ApproveStatesForConditionalApprove {
copy.ApproveStatesForConditionalApprove = append(copy.ApproveStatesForConditionalApprove, v.DeepCopy())
}
return copy
}
func (m *BRC20ModuleSwapInfo) CherryPick(pickUsersPkScript, pickTokensTick, pickPoolsPair map[string]bool) (copy *BRC20ModuleSwapInfo) {
copy = &BRC20ModuleSwapInfo{
ID: m.ID,
Name: m.Name,
DeployerPkScript: m.DeployerPkScript, // deployer
SequencerPkScript: m.SequencerPkScript, // Sequencer
GasToPkScript: m.GasToPkScript,
LpFeePkScript: m.LpFeePkScript,
FeeRateSwap: m.FeeRateSwap,
GasTick: m.GasTick,
// runtime for commit
CommitIdChainMap: make(map[string]struct{}, 0),
CommitIdMap: make(map[string]struct{}, 0),
// runtime for holders
// token holders in module
// ticker of users in module [address][tick]balanceData
UsersTokenBalanceDataMap: make(map[string]map[string]*BRC20ModuleTokenBalance, 0),
// token balance of address in module [tick][address]balanceData
TokenUsersBalanceDataMap: make(map[string]map[string]*BRC20ModuleTokenBalance, 0),
// swap
// lp token balance of address in module [pair][address]balance
LPTokenUsersBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// lp token of users in module [address][pair]balance
UsersLPTokenBalanceMap: make(map[string]map[string]*decimal.Decimal, 0),
// swap total balance
// total balance of pool in module [pair]balanceData
SwapPoolTotalBalanceDataMap: make(map[string]*BRC20ModulePoolTotalBalance, 0),
ConditionalApproveStateBalanceDataMap: make(map[string]*BRC20ModuleConditionalApproveStateBalance, 0),
}
for k := range m.CommitIdChainMap {
copy.CommitIdChainMap[k] = struct{}{}
}
for k := range m.CommitIdMap {
copy.CommitIdMap[k] = struct{}{}
}
// user/tick: balance
for address, dataMap := range m.UsersTokenBalanceDataMap {
dataMapCopy := make(map[string]*BRC20ModuleTokenBalance, 0)
for tick, balance := range dataMap {
dataMapCopy[tick] = balance.CherryPick()
}
copy.UsersTokenBalanceDataMap[address] = dataMapCopy
}
// tick/user: balance
for tick, dataMap := range m.TokenUsersBalanceDataMap {
dataMapCopy := make(map[string]*BRC20ModuleTokenBalance, 0)
for address := range dataMap {
dataMapCopy[address] = copy.UsersTokenBalanceDataMap[address][tick]
}
copy.TokenUsersBalanceDataMap[tick] = dataMapCopy
}
// user/pair: lpbalance
for address, dataMap := range m.UsersLPTokenBalanceMap {
dataMapCopy := make(map[string]*decimal.Decimal, 0)
for pair, balance := range dataMap {
dataMapCopy[pair] = decimal.NewDecimalCopy(balance)
}
copy.UsersLPTokenBalanceMap[address] = dataMapCopy
}
// pair/user: lpbalance
for pair, dataMap := range m.LPTokenUsersBalanceMap {
dataMapCopy := make(map[string]*decimal.Decimal, 0)
for address := range dataMap {
dataMapCopy[address] = copy.UsersLPTokenBalanceMap[address][pair]
}
copy.LPTokenUsersBalanceMap[pair] = dataMapCopy
}
// swap total balance
for pair, balance := range m.SwapPoolTotalBalanceDataMap {
copy.SwapPoolTotalBalanceDataMap[pair] = balance.CherryPick()
}
// swap deposit/approve state balance
// no need
return copy
}
func (moduleInfo *BRC20ModuleSwapInfo) GetTickConditionalApproveStateBalance(ticker string) (tokenBalance *BRC20ModuleConditionalApproveStateBalance) {
uniqueLowerTicker := strings.ToLower(ticker)
stateBalance, ok := moduleInfo.ConditionalApproveStateBalanceDataMap[uniqueLowerTicker]
if !ok {
stateBalance = &BRC20ModuleConditionalApproveStateBalance{Tick: ticker}
moduleInfo.ConditionalApproveStateBalanceDataMap[uniqueLowerTicker] = stateBalance
}
return stateBalance
}
func (moduleInfo *BRC20ModuleSwapInfo) GetUserTokenBalance(ticker, userPkScript string) (tokenBalance *BRC20ModuleTokenBalance) {
uniqueLowerTicker := strings.ToLower(ticker)
// get user's tokens to update
var usersTokens map[string]*BRC20ModuleTokenBalance
if tokens, ok := moduleInfo.UsersTokenBalanceDataMap[userPkScript]; !ok {
usersTokens = make(map[string]*BRC20ModuleTokenBalance, 0)
moduleInfo.UsersTokenBalanceDataMap[userPkScript] = usersTokens
} else {
usersTokens = tokens
}
// get tokenBalance to update
if tb, ok := usersTokens[uniqueLowerTicker]; !ok {
tokenBalance = &BRC20ModuleTokenBalance{Tick: ticker, PkScript: userPkScript}
usersTokens[uniqueLowerTicker] = tokenBalance
} else {
tokenBalance = tb
return tokenBalance
}
// set token's users
tokenUsers, ok := moduleInfo.TokenUsersBalanceDataMap[uniqueLowerTicker]
if !ok {
tokenUsers = make(map[string]*BRC20ModuleTokenBalance, 0)
moduleInfo.TokenUsersBalanceDataMap[uniqueLowerTicker] = tokenUsers
}
tokenUsers[userPkScript] = tokenBalance
return tokenBalance
}
// Generate matching approve events within the transaction when a transfer inscription transfer event occurs.
func (moduleInfo *BRC20ModuleSwapInfo) GenerateApproveEventsByTransfer(transState *TransferStateForConditionalApprove) (events []*ConditionalApproveEvent) {
balanceAmt := transState.Balance
for _, apprState := range moduleInfo.ApproveStatesForConditionalApprove {
// skip tick miss match
if apprState.Tick != transState.Tick {
continue
}
// skip to other user
if apprState.Owner != transState.To {
continue
}
// skip 0 approve balance
if apprState.Balance.Sign() == 0 {
continue
}
// skip 0 transfer balance
if balanceAmt.Sign() == 0 {
break
}
if apprState.Balance.Cmp(balanceAmt) <= 0 {
balanceAmt = balanceAmt.Sub(apprState.Balance)
senderPkScript := apprState.Owner
receiverPkScript := transState.From
event := NewConditionalApproveEvent(senderPkScript, receiverPkScript, apprState.Balance, nil, apprState.ToData, apprState.ApproveInfo, transState.InscriptionId, transState.Max)
events = append(events, event)
log.Printf("generate new approve event by transfer. rest match. id: %s", transState.InscriptionId)
log.Printf("generate new approve event. amt: %s", apprState.Balance.String())
apprState.Balance = nil
continue
} else {
apprState.Balance = apprState.Balance.Sub(balanceAmt)
senderPkScript := apprState.Owner
receiverPkScript := transState.From
event := NewConditionalApproveEvent(senderPkScript, receiverPkScript, balanceAmt, apprState.Balance, apprState.ToData, apprState.ApproveInfo, transState.InscriptionId, transState.Max)
events = append(events, event)
log.Printf("generate new approve event by transfer. partial match. id: %s", transState.InscriptionId)
log.Printf("generate new approve event. amt: %s", balanceAmt.String())
balanceAmt = nil
break
}
}
if balanceAmt.Sign() > 0 {
transState.Balance = balanceAmt
moduleInfo.TransferStatesForConditionalApprove = append(moduleInfo.TransferStatesForConditionalApprove, transState)
}
return events
}
// GenerateApproveEventsByApprove generates matching conditional-approve events within the transaction when a conditional-approve inscription is moved.
func (moduleInfo *BRC20ModuleSwapInfo) GenerateApproveEventsByApprove(owner string, balance *decimal.Decimal,
data *InscriptionBRC20Data, approveInfo *InscriptionBRC20SwapConditionalApproveInfo) (events []*ConditionalApproveEvent) {
balanceAmt := decimal.NewDecimalCopy(balance)
apprState := &ApproveStateForConditionalApprove{
Tick: approveInfo.Tick,
Owner: owner,
Balance: balanceAmt,
// fixme: object
Module: approveInfo.Module,
ApproveInfo: approveInfo,
ToData: data,
}
for _, transState := range moduleInfo.TransferStatesForConditionalApprove {
// stop once the approve balance is exhausted
if balanceAmt.Sign() == 0 {
break
}
if transState.Tick != apprState.Tick {
continue
}
if transState.To != apprState.Owner {
continue
}
if transState.Balance.Sign() == 0 {
continue
}
if transState.Balance.Cmp(balanceAmt) <= 0 {
balanceAmt = balanceAmt.Sub(transState.Balance)
senderPkScript := owner
receiverPkScript := transState.From
event := NewConditionalApproveEvent(senderPkScript, receiverPkScript, transState.Balance, balanceAmt, data, approveInfo, transState.InscriptionId, transState.Max)
events = append(events, event)
log.Printf("generate new approve event by cond. rest match. id: %s", transState.InscriptionId)
log.Printf("generate new approve event. amt: %s", transState.Balance.String())
transState.Balance = nil
continue
} else {
transState.Balance = transState.Balance.Sub(balanceAmt)
senderPkScript := owner
receiverPkScript := transState.From
event := NewConditionalApproveEvent(senderPkScript, receiverPkScript, balanceAmt, nil, data, approveInfo, transState.InscriptionId, transState.Max)
events = append(events, event)
log.Printf("generate new approve event by cond. partial match. id: %s", transState.InscriptionId)
log.Printf("generate new approve event. amt: %s", balanceAmt.String())
balanceAmt = nil
break
}
}
// Any remaining approve balance is recorded in state, awaiting subsequent deduction
if balanceAmt.Sign() > 0 {
apprState.Balance = balanceAmt
moduleInfo.ApproveStatesForConditionalApprove = append(moduleInfo.ApproveStatesForConditionalApprove, apprState)
}
return events
}
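// The two generators above implement the same matching rule from opposite
// sides. Below is an illustrative, self-contained sketch of that rule using
// plain int64 amounts instead of *decimal.Decimal (a hypothetical helper,
// not used by the indexer itself): pending balances for the same tick and
// owner are consumed in order until the incoming amount is exhausted; a
// partially consumed entry keeps its remainder, and any leftover incoming
// amount is parked for later matching.
func matchAmountsSketch(incoming int64, pending []int64) (consumed []int64, rest int64) {
for i := range pending {
if incoming == 0 {
break
}
if pending[i] == 0 {
continue
}
if pending[i] <= incoming {
// rest match: the pending entry is fully consumed
incoming -= pending[i]
consumed = append(consumed, pending[i])
pending[i] = 0
continue
}
// partial match: the incoming amount is fully consumed
pending[i] -= incoming
consumed = append(consumed, incoming)
incoming = 0
break
}
return consumed, incoming
}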
// BRC20ModuleTokenBalance holds the per-tick state of an address within a module (balances and history)
type BRC20ModuleTokenBalance struct {
Tick string
PkScript string
// confirmed safe
SwapAccountBalanceSafe *decimal.Decimal
ModuleAccountBalanceSafe *decimal.Decimal
// with unconfirmed balance
SwapAccountBalance *decimal.Decimal
AvailableBalanceSafe *decimal.Decimal
AvailableBalance *decimal.Decimal
ApproveableBalance *decimal.Decimal
CondApproveableBalance *decimal.Decimal
WithdrawableBalance *decimal.Decimal
ValidConditionalApproveMap map[string]*InscriptionBRC20Data
ValidApproveMap map[string]*InscriptionBRC20Data
ValidWithdrawMap map[string]*InscriptionBRC20Data
History []*BRC20ModuleHistory
}
func (b *BRC20ModuleTokenBalance) String() string {
return fmt.Sprintf("%s", b.SwapAccountBalance.String())
}
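// ModuleBalance returns the total amount held inside the module for this
// tick: available + approveable + conditionally approveable + withdrawable.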
func (bal *BRC20ModuleTokenBalance) ModuleBalance() *decimal.Decimal {
return bal.AvailableBalance.Add(
bal.ApproveableBalance).Add(
bal.CondApproveableBalance).Add(
bal.WithdrawableBalance)
}
func (in *BRC20ModuleTokenBalance) DeepCopy() *BRC20ModuleTokenBalance {
tb := &BRC20ModuleTokenBalance{
Tick: in.Tick,
PkScript: in.PkScript,
SwapAccountBalanceSafe: decimal.NewDecimalCopy(in.SwapAccountBalanceSafe),
ModuleAccountBalanceSafe: decimal.NewDecimalCopy(in.ModuleAccountBalanceSafe),
SwapAccountBalance: decimal.NewDecimalCopy(in.SwapAccountBalance),
AvailableBalanceSafe: decimal.NewDecimalCopy(in.AvailableBalanceSafe),
AvailableBalance: decimal.NewDecimalCopy(in.AvailableBalance),
ApproveableBalance: decimal.NewDecimalCopy(in.ApproveableBalance),
CondApproveableBalance: decimal.NewDecimalCopy(in.CondApproveableBalance),
WithdrawableBalance: decimal.NewDecimalCopy(in.WithdrawableBalance),
ValidConditionalApproveMap: make(map[string]*InscriptionBRC20Data, len(in.ValidConditionalApproveMap)),
ValidApproveMap: make(map[string]*InscriptionBRC20Data, len(in.ValidApproveMap)),
ValidWithdrawMap: make(map[string]*InscriptionBRC20Data, len(in.ValidWithdrawMap)),
}
for k, v := range in.ValidConditionalApproveMap {
data := *v
tb.ValidConditionalApproveMap[k] = &data
}
for k, v := range in.ValidApproveMap {
data := *v
tb.ValidApproveMap[k] = &data
}
for k, v := range in.ValidWithdrawMap {
data := *v
tb.ValidWithdrawMap[k] = &data
}
for _, h := range in.History {
tb.History = append(tb.History, h)
// fix: more history
}
// tb.History = make([]BRC20History, len(in.History))
// copy(tb.History, in.History)
return tb
}
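// CherryPick returns a lightweight snapshot of the balance fields only; the
// valid-inscription maps and the history are not copied, in contrast to
// DeepCopy above.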
func (in *BRC20ModuleTokenBalance) CherryPick() *BRC20ModuleTokenBalance {
tb := &BRC20ModuleTokenBalance{
Tick: in.Tick,
PkScript: in.PkScript,
SwapAccountBalanceSafe: decimal.NewDecimalCopy(in.SwapAccountBalanceSafe),
ModuleAccountBalanceSafe: decimal.NewDecimalCopy(in.ModuleAccountBalanceSafe),
SwapAccountBalance: decimal.NewDecimalCopy(in.SwapAccountBalance),
AvailableBalanceSafe: decimal.NewDecimalCopy(in.AvailableBalanceSafe),
AvailableBalance: decimal.NewDecimalCopy(in.AvailableBalance),
ApproveableBalance: decimal.NewDecimalCopy(in.ApproveableBalance),
CondApproveableBalance: decimal.NewDecimalCopy(in.CondApproveableBalance),
WithdrawableBalance: decimal.NewDecimalCopy(in.WithdrawableBalance),
}
return tb
}
// BRC20ModulePoolTotalBalance holds the per-pair state of a swap pool (balances and history)
type BRC20ModulePoolTotalBalance struct {
Tick [2]string
TickBalance [2]*decimal.Decimal
LpBalance *decimal.Decimal
LastRootK *decimal.Decimal
// history
History []*BRC20ModuleHistory
}
func (in *BRC20ModulePoolTotalBalance) DeepCopy() *BRC20ModulePoolTotalBalance {
tb := &BRC20ModulePoolTotalBalance{
Tick: in.Tick,
TickBalance: in.TickBalance,
LpBalance: decimal.NewDecimalCopy(in.LpBalance),
LastRootK: decimal.NewDecimalCopy(in.LastRootK),
}
for _, h := range in.History {
tb.History = append(tb.History, h)
// fix: more history
}
return tb
}
func (in *BRC20ModulePoolTotalBalance) CherryPick() *BRC20ModulePoolTotalBalance {
tb := &BRC20ModulePoolTotalBalance{
Tick: in.Tick,
TickBalance: in.TickBalance,
LpBalance: decimal.NewDecimalCopy(in.LpBalance),
LastRootK: decimal.NewDecimalCopy(in.LastRootK),
}
return tb
}
type InscriptionBRC20SwapInfo struct {
Module string
Tick string
Amount *decimal.Decimal
Data *InscriptionBRC20Data
}
type InscriptionBRC20SwapConditionalApproveInfo struct {
Module string
Tick string
Amount *decimal.Decimal // current amt
Balance *decimal.Decimal // current balance
HasMoved bool // has moved
OwnerPkScript string // owner
DelegatorPkScript string // delegator
Data *InscriptionBRC20Data
}
func (d *InscriptionBRC20SwapConditionalApproveInfo) DeepCopy() (copy *InscriptionBRC20SwapConditionalApproveInfo) {
copy = &InscriptionBRC20SwapConditionalApproveInfo{
Module: d.Module,
Tick: d.Tick,
Amount: decimal.NewDecimalCopy(d.Amount), // maybe no need copy
Balance: decimal.NewDecimalCopy(d.Balance), // maybe no need copy
HasMoved: d.HasMoved,
OwnerPkScript: d.OwnerPkScript,
DelegatorPkScript: d.DelegatorPkScript,
Data: d.Data,
}
return copy
}
// history inscription info
type InscriptionBRC20SwapInfoResp struct {
ContentBody []byte `json:"content"`
InscriptionNumber int64 `json:"inscriptionNumber"`
InscriptionId string `json:"inscriptionId"`
Height uint32 `json:"-"`
Confirmations int `json:"confirmations"`
}

155
utils/bip322/verify.go Normal file
View File

@@ -0,0 +1,155 @@
package bip322
import (
"crypto/sha256"
"fmt"
"github.com/btcsuite/btcd/btcutil"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/unisat-wallet/libbrc20-indexer/utils"
)
func GetSha256(data []byte) (hash []byte) {
sha := sha256.New()
sha.Write(data[:])
hash = sha.Sum(nil)
return
}
func GetTagSha256(data []byte) (hash []byte) {
tag := []byte("BIP0322-signed-message")
hashTag := GetSha256(tag)
var msg []byte
msg = append(msg, hashTag...)
msg = append(msg, hashTag...)
msg = append(msg, data...)
return GetSha256(msg)
}
func PrepareTx(pkScript []byte, message string) (toSign *wire.MsgTx, err error) {
// Create a new transaction to spend
toSpend := wire.NewMsgTx(0)
// Compute the tagged hash of the message
messageHash := GetTagSha256([]byte(message))
// Create the script for to_spend
builder := txscript.NewScriptBuilder()
builder.AddOp(txscript.OP_0)
builder.AddData(messageHash)
scriptSig, err := builder.Script()
if err != nil {
return nil, err
}
// Create a TxIn with the outpoint 000...000:FFFFFFFF
prevOutHash, _ := chainhash.NewHashFromStr("0000000000000000000000000000000000000000000000000000000000000000")
prevOut := wire.NewOutPoint(prevOutHash, wire.MaxPrevOutIndex)
txIn := wire.NewTxIn(prevOut, scriptSig, nil)
txIn.Sequence = 0
toSpend.AddTxIn(txIn)
toSpend.AddTxOut(wire.NewTxOut(0, pkScript))
// Create a transaction for to_sign
toSign = wire.NewMsgTx(0)
hash := toSpend.TxHash()
prevOutSpend := wire.NewOutPoint((*chainhash.Hash)(hash.CloneBytes()), 0)
txSignIn := wire.NewTxIn(prevOutSpend, nil, nil)
txSignIn.Sequence = 0
toSign.AddTxIn(txSignIn)
// Create the script for to_sign
builderPk := txscript.NewScriptBuilder()
builderPk.AddOp(txscript.OP_RETURN)
scriptPk, err := builderPk.Script()
if err != nil {
return nil, err
}
toSign.AddTxOut(wire.NewTxOut(0, scriptPk))
return toSign, nil
}
// VerifySignature checks a BIP-322 signature: witness is the witness of the
// signed to_sign transaction, pkScript is the output script of the signing
// address, and message is the signed message text.
func VerifySignature(witness wire.TxWitness, pkScript []byte, message string) bool {
toSign, err := PrepareTx(pkScript, message)
if err != nil {
fmt.Println("verifying signature, PrepareTx failed:", err)
return false
}
toSign.TxIn[0].Witness = witness
prevFetcher := txscript.NewCannedPrevOutputFetcher(
pkScript, 0,
)
hashCache := txscript.NewTxSigHashes(toSign, prevFetcher)
vm, err := txscript.NewEngine(pkScript, toSign, 0, txscript.StandardVerifyFlags, nil, hashCache, 0, prevFetcher)
if err != nil {
return false
}
if err := vm.Execute(); err != nil {
return false
}
return true
}
func SignSignatureTaproot(pkey, message string) (witness wire.TxWitness, pkScript []byte, err error) {
decodedWif, err := btcutil.DecodeWIF(pkey)
if err != nil {
return nil, nil, err
}
privKey := decodedWif.PrivKey
pubKey := txscript.ComputeTaprootKeyNoScript(privKey.PubKey())
pkScript, err = utils.PayToTaprootScript(pubKey)
if err != nil {
return nil, nil, err
}
toSign, err := PrepareTx(pkScript, message)
if err != nil {
return nil, nil, err
}
prevFetcher := txscript.NewCannedPrevOutputFetcher(
pkScript, 0,
)
sigHashes := txscript.NewTxSigHashes(toSign, prevFetcher)
witness, err = txscript.TaprootWitnessSignature(
toSign, sigHashes, 0, 0, pkScript,
txscript.SigHashDefault, privKey,
)
if err != nil {
return nil, nil, err
}
return witness, pkScript, nil
}
func SignSignatureP2WPKH(pkey, message string) (witness wire.TxWitness, pkScript []byte, err error) {
decodedWif, err := btcutil.DecodeWIF(pkey)
if err != nil {
return nil, nil, err
}
privKey := decodedWif.PrivKey
pubKey := privKey.PubKey()
pkScript, err = utils.PayToWitnessScript(pubKey)
if err != nil {
return nil, nil, err
}
toSign, err := PrepareTx(pkScript, message)
if err != nil {
return nil, nil, err
}
prevFetcher := txscript.NewCannedPrevOutputFetcher(
pkScript, 0,
)
sigHashes := txscript.NewTxSigHashes(toSign, prevFetcher)
witness, err = txscript.WitnessSignature(toSign, sigHashes,
0, 0, pkScript, txscript.SigHashAll,
privKey, true)
if err != nil {
return nil, nil, err
}
return witness, pkScript, nil
}
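// Illustrative usage sketch of the helpers above (a hypothetical helper, not
// referenced elsewhere; the WIF key and message are caller-supplied
// placeholders): sign a message with a P2WPKH key, then verify it with the
// returned witness and pkScript.
func signAndVerifySketch(wifKey, message string) (bool, error) {
witness, pkScript, err := SignSignatureP2WPKH(wifKey, message)
if err != nil {
return false, err
}
// VerifySignature rebuilds the BIP-322 to_spend/to_sign transactions and
// runs the script engine against the provided witness.
return VerifySignature(witness, pkScript, message), nil
}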

571
utils/script/compress.go Normal file
View File

@@ -0,0 +1,571 @@
// Copyright (c) 2015-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package script
import "errors"
// -----------------------------------------------------------------------------
// A variable length quantity (VLQ) is an encoding that uses an arbitrary number
// of binary octets to represent an arbitrarily large integer. The scheme
// employs a most significant byte (MSB) base-128 encoding where the high bit in
// each byte indicates whether or not the byte is the final one. In addition,
// to ensure there are no redundant encodings, an offset is subtracted every
// time a group of 7 bits is shifted out. Therefore each integer can be
// represented in exactly one way, and each representation stands for exactly
// one integer.
//
// Another nice property of this encoding is that it provides a compact
// representation of values that are typically used to indicate sizes. For
// example, the values 0 - 127 are represented with a single byte, 128 - 16511
// with two bytes, and 16512 - 2113663 with three bytes.
//
// While the encoding allows arbitrarily large integers, it is artificially
// limited in this code to an unsigned 64-bit integer for efficiency purposes.
//
// Example encodings:
// 0 -> [0x00]
// 127 -> [0x7f] * Max 1-byte value
// 128 -> [0x80 0x00]
// 129 -> [0x80 0x01]
// 255 -> [0x80 0x7f]
// 256 -> [0x81 0x00]
// 16511 -> [0xff 0x7f] * Max 2-byte value
// 16512 -> [0x80 0x80 0x00]
// 32895 -> [0x80 0xff 0x7f]
// 2113663 -> [0xff 0xff 0x7f] * Max 3-byte value
// 270549119 -> [0xff 0xff 0xff 0x7f] * Max 4-byte value
// 2^64-1 -> [0x80 0xfe 0xfe 0xfe 0xfe 0xfe 0xfe 0xfe 0xfe 0x7f]
//
// References:
// https://en.wikipedia.org/wiki/Variable-length_quantity
// http://www.codecodex.com/wiki/Variable-Length_Integers
// -----------------------------------------------------------------------------
// serializeSizeVLQ returns the number of bytes it would take to serialize the
// passed number as a variable-length quantity according to the format described
// above.
func serializeSizeVLQ(n uint64) int {
size := 1
for ; n > 0x7f; n = (n >> 7) - 1 {
size++
}
return size
}
// PutVLQ serializes the provided number to a variable-length quantity according
// to the format described above and returns the number of bytes of the encoded
// value. The result is placed directly into the passed byte slice which must
// be at least large enough to handle the number of bytes returned by the
// serializeSizeVLQ function or it will panic.
func PutVLQ(target []byte, n uint64) int {
offset := 0
for ; ; offset++ {
// The high bit is set when another byte follows.
highBitMask := byte(0x80)
if offset == 0 {
highBitMask = 0x00
}
target[offset] = byte(n&0x7f) | highBitMask
if n <= 0x7f {
break
}
n = (n >> 7) - 1
}
// Reverse the bytes so it is MSB-encoded.
for i, j := 0, offset; i < j; i, j = i+1, j-1 {
target[i], target[j] = target[j], target[i]
}
return offset + 1
}
// DeserializeVLQ deserializes the provided variable-length quantity according
// to the format described above. It also returns the number of bytes
// deserialized.
func DeserializeVLQ(serialized []byte) (uint64, int) {
var n uint64
var size int
for _, val := range serialized {
size++
n = (n << 7) | uint64(val&0x7f)
if val&0x80 != 0x80 {
break
}
n++
}
return n, size
}
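// Illustrative round-trip sketch of the VLQ helpers above (a hypothetical
// helper, not used elsewhere): the caller sizes the buffer with
// serializeSizeVLQ before calling PutVLQ, and DeserializeVLQ recovers both
// the value and the number of bytes consumed.
func vlqRoundTripSketch(n uint64) bool {
buf := make([]byte, serializeSizeVLQ(n))
written := PutVLQ(buf, n)
got, read := DeserializeVLQ(buf)
return got == n && written == read
}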
// -----------------------------------------------------------------------------
// In order to reduce the size of stored scripts, a domain specific compression
// algorithm is used which recognizes standard scripts and stores them using
// less bytes than the original script. The compression algorithm used here was
// obtained from Bitcoin Core, so all credits for the algorithm go to it.
//
// The general serialized format is:
//
// <script size or type><script data>
//
// Field Type Size
// script size or type VLQ variable
// script data []byte variable
//
// The specific serialized format for each recognized standard script is:
//
// - Pay-to-pubkey-hash: (21 bytes) - <0><20-byte pubkey hash>
// - Pay-to-script-hash: (21 bytes) - <1><20-byte script hash>
// - Pay-to-pubkey (compressed)**: (33 bytes) - <2 or 3><32-byte pubkey X value>
// 2, 3 = compressed pubkey with bit 0 specifying the y coordinate to use
// - Pay-to-pubkey (uncompressed)**: (65 bytes) - <4 or 5><64-byte pubkey X and Y values>
// ** Only valid public keys starting with 0x02, 0x03, and 0x04 are supported.
//
// Any scripts which are not recognized as one of the aforementioned standard
// scripts are encoded using the general serialized format and encode the script
// size as the sum of the actual size of the script and the number of special
// cases.
// -----------------------------------------------------------------------------
// The following constants specify the special constants used to identify a
// special script type in the domain-specific compressed script encoding.
//
// NOTE: This section specifically does not use iota since these values are
// serialized and must be stable for long-term storage.
const (
// cstPayToPubKeyHash identifies a compressed pay-to-pubkey-hash script.
cstPayToPubKeyHash = 0
// cstPayToScriptHash identifies a compressed pay-to-script-hash script.
cstPayToScriptHash = 1
// cstPayToPubKeyComp2 identifies a compressed pay-to-pubkey script to
// a compressed pubkey. Bit 0 specifies which y-coordinate to use
// to reconstruct the full uncompressed pubkey.
cstPayToPubKeyComp2 = 2
// cstPayToPubKeyComp3 identifies a compressed pay-to-pubkey script to
// a compressed pubkey. Bit 0 specifies which y-coordinate to use
// to reconstruct the full uncompressed pubkey.
cstPayToPubKeyComp3 = 3
// cstPayToPubKeyUncomp4 identifies a compressed pay-to-pubkey script to
// an uncompressed pubkey whose full X and Y coordinates are stored.
cstPayToPubKeyUncomp4 = 4
// cstPayToPubKeyUncomp5 identifies a compressed pay-to-pubkey script to
// an uncompressed pubkey whose full X and Y coordinates are stored.
cstPayToPubKeyUncomp5 = 5
// numSpecialScripts is the number of special scripts recognized by the
// domain-specific script compression algorithm.
numSpecialScripts = 6
)
// isPubKeyHash returns whether or not the passed public key script is a
// standard pay-to-pubkey-hash script along with the pubkey hash it is paying to
// if it is.
func isPubKeyHash(script []byte) (bool, []byte) {
if len(script) == 25 && script[0] == OP_DUP &&
script[1] == OP_HASH160 &&
script[2] == OP_DATA_20 &&
script[23] == OP_EQUALVERIFY &&
script[24] == OP_CHECKSIG {
return true, script[3:23]
}
return false, nil
}
// isScriptHash returns whether or not the passed public key script is a
// standard pay-to-script-hash script along with the script hash it is paying to
// if it is.
func isScriptHash(script []byte) (bool, []byte) {
if len(script) == 23 && script[0] == OP_HASH160 &&
script[1] == OP_DATA_20 &&
script[22] == OP_EQUAL {
return true, script[2:22]
}
return false, nil
}
// isPubKey returns whether or not the passed public key script is a standard
// pay-to-pubkey script that pays to a valid compressed or uncompressed public
// key along with the serialized pubkey it is paying to if it is.
//
// NOTE: This function ensures the public key is actually valid since the
// compression algorithm requires valid pubkeys. It does not support hybrid
// pubkeys. This means that even if the script has the correct form for a
// pay-to-pubkey script, this function will only return true when it is paying
// to a valid compressed or uncompressed pubkey.
func isPubKey(script []byte) (bool, []byte) {
// Pay-to-compressed-pubkey script.
if len(script) == 35 && script[0] == OP_DATA_33 &&
script[34] == OP_CHECKSIG && (script[1] == 0x02 ||
script[1] == 0x03) {
// Ensure the public key is valid.
serializedPubKey := script[1:34]
return true, serializedPubKey
}
// Pay-to-uncompressed-pubkey script.
if len(script) == 67 && script[0] == OP_DATA_65 &&
script[66] == OP_CHECKSIG && script[1] == 0x04 {
// Ensure the public key is valid.
serializedPubKey := script[1:66]
return true, serializedPubKey
}
return false, nil
}
// compressedScriptSize returns the number of bytes the passed script would take
// when encoded with the domain specific compression algorithm described above.
func compressedScriptSize(pkScript []byte) int {
// Pay-to-pubkey-hash script.
if valid, _ := isPubKeyHash(pkScript); valid {
return 21
}
// Pay-to-script-hash script.
if valid, _ := isScriptHash(pkScript); valid {
return 21
}
// Pay-to-pubkey (compressed or uncompressed) script.
if valid, serializedPubKey := isPubKey(pkScript); valid {
if serializedPubKey[0] == 0x04 {
return 65
}
return 33
}
// When none of the above special cases apply, encode the script as is
// preceded by the sum of its size and the number of special cases
// encoded as a variable length quantity.
return serializeSizeVLQ(uint64(len(pkScript)+numSpecialScripts)) +
len(pkScript)
}
// DecodeCompressedScriptSize treats the passed serialized bytes as a compressed
// script, possibly followed by other data, and returns the number of bytes it
// occupies taking into account the special encoding of the script size by the
// domain specific compression algorithm described above.
func DecodeCompressedScriptSize(serialized []byte) int {
scriptSize, bytesRead := DeserializeVLQ(serialized)
if bytesRead == 0 {
return 0
}
switch scriptSize {
case cstPayToPubKeyHash:
return 21
case cstPayToScriptHash:
return 21
case cstPayToPubKeyComp2, cstPayToPubKeyComp3:
return 33
case cstPayToPubKeyUncomp4, cstPayToPubKeyUncomp5:
return 65
}
scriptSize -= numSpecialScripts
scriptSize += uint64(bytesRead)
return int(scriptSize)
}
// PutCompressedScript compresses the passed script according to the domain
// specific compression algorithm described above directly into the passed
// target byte slice. The target byte slice must be at least large enough to
// handle the number of bytes returned by the compressedScriptSize function or
// it will panic.
func PutCompressedScript(target, pkScript []byte) int {
// Pay-to-pubkey-hash script.
if valid, hash := isPubKeyHash(pkScript); valid {
target[0] = cstPayToPubKeyHash
copy(target[1:21], hash)
return 21
}
// Pay-to-script-hash script.
if valid, hash := isScriptHash(pkScript); valid {
target[0] = cstPayToScriptHash
copy(target[1:21], hash)
return 21
}
// Pay-to-pubkey (compressed or uncompressed) script.
if valid, serializedPubKey := isPubKey(pkScript); valid {
pubKeyFormat := serializedPubKey[0]
target[0] = pubKeyFormat
switch pubKeyFormat {
case 0x02, 0x03:
copy(target[1:33], serializedPubKey[1:33])
return 33
case 0x04:
// Store the full 64-byte X and Y coordinates so the
// original uncompressed pubkey can be restored verbatim
// on decompression.
copy(target[1:65], serializedPubKey[1:65])
return 65
}
}
// When none of the above special cases apply, encode the unmodified
// script preceded by the sum of its size and the number of special
// cases encoded as a variable length quantity.
encodedSize := uint64(len(pkScript) + numSpecialScripts)
vlqSizeLen := PutVLQ(target, encodedSize)
copy(target[vlqSizeLen:], pkScript)
return vlqSizeLen + len(pkScript)
}
// DecompressScript returns the original script obtained by decompressing the
// passed compressed script according to the domain specific compression
// algorithm described above.
//
// NOTE: The script parameter must already have been proven to be long enough
// to contain the number of bytes returned by DecodeCompressedScriptSize or it
// will panic. This is acceptable since it is only an internal function.
func DecompressScript(compressedPkScript []byte) []byte {
// In practice this function will not be called with a zero-length or
// nil script since the nil script encoding includes the length, however
// the code below assumes the length exists, so just return nil now if
// the function ever ends up being called with a nil script in the
// future.
if len(compressedPkScript) == 0 {
return nil
}
// Decode the script size and examine it for the special cases.
encodedScriptSize, bytesRead := DeserializeVLQ(compressedPkScript)
switch encodedScriptSize {
// Pay-to-pubkey-hash script. The resulting script is:
// <OP_DUP><OP_HASH160><20 byte hash><OP_EQUALVERIFY><OP_CHECKSIG>
case cstPayToPubKeyHash:
pkScript := make([]byte, 25)
pkScript[0] = OP_DUP
pkScript[1] = OP_HASH160
pkScript[2] = OP_DATA_20
copy(pkScript[3:], compressedPkScript[bytesRead:bytesRead+20])
pkScript[23] = OP_EQUALVERIFY
pkScript[24] = OP_CHECKSIG
return pkScript
// Pay-to-script-hash script. The resulting script is:
// <OP_HASH160><20 byte script hash><OP_EQUAL>
case cstPayToScriptHash:
pkScript := make([]byte, 23)
pkScript[0] = OP_HASH160
pkScript[1] = OP_DATA_20
copy(pkScript[2:], compressedPkScript[bytesRead:bytesRead+20])
pkScript[22] = OP_EQUAL
return pkScript
// Pay-to-compressed-pubkey script. The resulting script is:
// <OP_DATA_33><33 byte compressed pubkey><OP_CHECKSIG>
case cstPayToPubKeyComp2, cstPayToPubKeyComp3:
pkScript := make([]byte, 35)
pkScript[0] = OP_DATA_33
pkScript[1] = byte(encodedScriptSize)
copy(pkScript[2:], compressedPkScript[bytesRead:bytesRead+32])
pkScript[34] = OP_CHECKSIG
return pkScript
// Pay-to-uncompressed-pubkey script. The resulting script is:
// <OP_DATA_65><65 byte uncompressed pubkey><OP_CHECKSIG>
case cstPayToPubKeyUncomp4, cstPayToPubKeyUncomp5:
if len(compressedPkScript[bytesRead:]) < 64 {
return nil
}
pkScript := make([]byte, 67)
pkScript[0] = OP_DATA_65
pkScript[1] = byte(encodedScriptSize)
copy(pkScript[2:], compressedPkScript[bytesRead:bytesRead+64])
pkScript[66] = OP_CHECKSIG
return pkScript
}
// When none of the special cases apply, the script was encoded using
// the general format, so reduce the script size by the number of
// special cases and return the unmodified script.
scriptSize := int(encodedScriptSize - numSpecialScripts)
pkScript := make([]byte, scriptSize)
copy(pkScript, compressedPkScript[bytesRead:bytesRead+scriptSize])
return pkScript
}
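// Illustrative round-trip sketch of the script compression helpers above
// (a hypothetical helper, not used elsewhere): size the target with
// compressedScriptSize, compress with PutCompressedScript, then recover the
// original script with DecompressScript.
func scriptRoundTripSketch(pkScript []byte) []byte {
target := make([]byte, compressedScriptSize(pkScript))
PutCompressedScript(target, pkScript)
return DecompressScript(target)
}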
// -----------------------------------------------------------------------------
// In order to reduce the size of stored amounts, a domain specific compression
// algorithm is used which relies on there typically being a lot of zeroes at
// end of the amounts. The compression algorithm used here was obtained from
// Bitcoin Core, so all credits for the algorithm go to it.
//
// While this is simply exchanging one uint64 for another, the resulting value
// for typical amounts has a much smaller magnitude which results in fewer bytes
// when encoded as variable length quantity. For example, consider the amount
// of 0.1 BTC which is 10000000 satoshi. Encoding 10000000 as a VLQ would take
// 4 bytes while encoding the compressed value of 8 as a VLQ only takes 1 byte.
//
// Essentially the compression is achieved by splitting the value into an
// exponent in the range [0-9] and a digit in the range [1-9], when possible,
// and encoding them in a way that can be decoded. More specifically, the
// encoding is as follows:
// - 0 is 0
// - Find the exponent, e, as the largest power of 10 that evenly divides the
// value up to a maximum of 9
// - When e < 9, the final digit can't be 0 so store it as d and remove it by
// dividing the value by 10 (call the result n). The encoded value is thus:
// 1 + 10*(9*n + d-1) + e
// - When e==9, the only thing known is the amount is not 0. The encoded value
// is thus:
// 1 + 10*(n-1) + e == 10 + 10*(n-1)
//
// Example encodings:
// (The numbers in parenthesis are the number of bytes when serialized as a VLQ)
// 0 (1) -> 0 (1) * 0.00000000 BTC
// 1000 (2) -> 4 (1) * 0.00001000 BTC
// 10000 (2) -> 5 (1) * 0.00010000 BTC
// 12345678 (4) -> 111111101(4) * 0.12345678 BTC
// 50000000 (4) -> 47 (1) * 0.50000000 BTC
// 100000000 (4) -> 9 (1) * 1.00000000 BTC
// 500000000 (5) -> 49 (1) * 5.00000000 BTC
// 1000000000 (5) -> 10 (1) * 10.00000000 BTC
// -----------------------------------------------------------------------------
// CompressTxOutAmount compresses the passed amount according to the domain
// specific compression algorithm described above.
func CompressTxOutAmount(amount uint64) uint64 {
// No need to do any work if it's zero.
if amount == 0 {
return 0
}
// Find the largest power of 10 (max of 9) that evenly divides the
// value.
exponent := uint64(0)
for amount%10 == 0 && exponent < 9 {
amount /= 10
exponent++
}
// The compressed result for exponents less than 9 is:
// 1 + 10*(9*n + d-1) + e
if exponent < 9 {
lastDigit := amount % 10
amount /= 10
return 1 + 10*(9*amount+lastDigit-1) + exponent
}
// The compressed result for an exponent of 9 is:
// 1 + 10*(n-1) + e == 10 + 10*(n-1)
return 10 + 10*(amount-1)
}
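// Worked example of the encoding above (matches the 546-satoshi test vector):
// 546 has no trailing zeros, so e = 0; stripping the last digit gives d = 6
// and n = 54, and the compressed value is 1 + 10*(9*54 + 6-1) + 0 = 4911.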
// DecompressTxOutAmount returns the original amount the passed compressed
// amount represents according to the domain specific compression algorithm
// described above.
func DecompressTxOutAmount(amount uint64) uint64 {
// No need to do any work if it's zero.
if amount == 0 {
return 0
}
// The decompressed amount is either of the following two equations:
// x = 1 + 10*(9*n + d - 1) + e
// x = 1 + 10*(n - 1) + 9
amount--
// The decompressed amount is now one of the following two equations:
// x = 10*(9*n + d - 1) + e
// x = 10*(n - 1) + 9
exponent := amount % 10
amount /= 10
// The decompressed amount is now one of the following two equations:
// x = 9*n + d - 1 | where e < 9
// x = n - 1 | where e = 9
n := uint64(0)
if exponent < 9 {
lastDigit := amount%9 + 1
amount /= 9
n = amount*10 + lastDigit
} else {
n = amount + 1
}
// Apply the exponent.
for ; exponent > 0; exponent-- {
n *= 10
}
return n
}
// -----------------------------------------------------------------------------
// Compressed transaction outputs consist of an amount and a public key script
// both compressed using the domain specific compression algorithms previously
// described.
//
// The serialized format is:
//
// <compressed amount><compressed script>
//
// Field Type Size
// compressed amount VLQ variable
// compressed script []byte variable
// -----------------------------------------------------------------------------
// compressedTxOutSize returns the number of bytes the passed transaction output
// fields would take when encoded with the format described above.
func compressedTxOutSize(amount uint64, pkScript []byte) int {
return serializeSizeVLQ(CompressTxOutAmount(amount)) +
compressedScriptSize(pkScript)
}
// putCompressedTxOut compresses the passed amount and script according to their
// domain specific compression algorithms and encodes them directly into the
// passed target byte slice with the format described above. The target byte
// slice must be at least large enough to handle the number of bytes returned by
// the compressedTxOutSize function or it will panic.
func putCompressedTxOut(target []byte, amount uint64, pkScript []byte) int {
offset := PutVLQ(target, CompressTxOutAmount(amount))
offset += PutCompressedScript(target[offset:], pkScript)
return offset
}
// decodeCompressedTxOut decodes the passed compressed txout, possibly followed
// by other data, into its uncompressed amount and script and returns them along
// with the number of bytes they occupied prior to decompression.
func decodeCompressedTxOut(serialized []byte) (uint64, []byte, int, error) {
// Deserialize the compressed amount and ensure there are bytes
// remaining for the compressed script.
compressedAmount, bytesRead := DeserializeVLQ(serialized)
if bytesRead >= len(serialized) {
return 0, nil, bytesRead, errors.New("unexpected end of data after compressed amount")
}
// Decode the compressed script size and ensure there are enough bytes
// left in the slice for it.
scriptSize := DecodeCompressedScriptSize(serialized[bytesRead:])
if len(serialized[bytesRead:]) < scriptSize {
return 0, nil, bytesRead, errors.New("unexpected end of data after script size")
}
// Decompress and return the amount and script.
amount := DecompressTxOutAmount(compressedAmount)
script := DecompressScript(serialized[bytesRead : bytesRead+scriptSize])
return amount, script, bytesRead + scriptSize, nil
}
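// Illustrative round-trip sketch tying the txout helpers together (a
// hypothetical helper, not used elsewhere): size the buffer, serialize the
// compressed txout, then decode it back into the amount and script.
func txOutRoundTripSketch(amount uint64, pkScript []byte) (uint64, []byte, error) {
buf := make([]byte, compressedTxOutSize(amount, pkScript))
putCompressedTxOut(buf, amount, pkScript)
gotAmount, gotScript, _, err := decodeCompressedTxOut(buf)
return gotAmount, gotScript, err
}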

View File

@@ -0,0 +1,446 @@
// Copyright (c) 2015-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package script
import (
"bytes"
"encoding/hex"
"testing"
)
// hexToBytes converts the passed hex string into bytes and will panic if there
// is an error. This is only provided for the hard-coded constants so errors in
// the source code can be detected. It will only (and must only) be called with
// hard-coded values.
func hexToBytes(s string) []byte {
b, err := hex.DecodeString(s)
if err != nil {
panic("invalid hex in source file: " + s)
}
return b
}
// TestVLQ ensures the variable length quantity serialization, deserialization,
// and size calculation works as expected.
func TestVLQ(t *testing.T) {
t.Parallel()
tests := []struct {
val uint64
serialized []byte
}{
{0, hexToBytes("00")},
{1, hexToBytes("01")},
{127, hexToBytes("7f")},
{128, hexToBytes("8000")},
{129, hexToBytes("8001")},
{255, hexToBytes("807f")},
{256, hexToBytes("8100")},
{16383, hexToBytes("fe7f")},
{16384, hexToBytes("ff00")},
{16511, hexToBytes("ff7f")}, // Max 2-byte value
{16512, hexToBytes("808000")},
{16513, hexToBytes("808001")},
{16639, hexToBytes("80807f")},
{32895, hexToBytes("80ff7f")},
{2113663, hexToBytes("ffff7f")}, // Max 3-byte value
{2113664, hexToBytes("80808000")},
{270549119, hexToBytes("ffffff7f")}, // Max 4-byte value
{270549120, hexToBytes("8080808000")},
{2147483647, hexToBytes("86fefefe7f")},
{2147483648, hexToBytes("86fefeff00")},
{4294967295, hexToBytes("8efefefe7f")}, // Max uint32, 5 bytes
// Max uint64, 10 bytes
{18446744073709551615, hexToBytes("80fefefefefefefefe7f")},
}
for _, test := range tests {
// Ensure the function to calculate the serialized size without
// actually serializing the value is calculated properly.
gotSize := serializeSizeVLQ(test.val)
if gotSize != len(test.serialized) {
t.Errorf("serializeSizeVLQ: did not get expected size "+
"for %d - got %d, want %d", test.val, gotSize,
len(test.serialized))
continue
}
// Ensure the value serializes to the expected bytes.
gotBytes := make([]byte, gotSize)
gotBytesWritten := PutVLQ(gotBytes, test.val)
if !bytes.Equal(gotBytes, test.serialized) {
t.Errorf("PutVLQUnchecked: did not get expected bytes "+
"for %d - got %x, want %x", test.val, gotBytes,
test.serialized)
continue
}
if gotBytesWritten != len(test.serialized) {
t.Errorf("PutVLQUnchecked: did not get expected number "+
"of bytes written for %d - got %d, want %d",
test.val, gotBytesWritten, len(test.serialized))
continue
}
// Ensure the serialized bytes deserialize to the expected
// value.
gotVal, gotBytesRead := DeserializeVLQ(test.serialized)
if gotVal != test.val {
t.Errorf("DeserializeVLQ: did not get expected value "+
"for %x - got %d, want %d", test.serialized,
gotVal, test.val)
continue
}
if gotBytesRead != len(test.serialized) {
t.Errorf("DeserializeVLQ: did not get expected number "+
"of bytes read for %d - got %d, want %d",
test.serialized, gotBytesRead,
len(test.serialized))
continue
}
}
}
// TestScriptCompression ensures the domain-specific script compression and
// decompression works as expected.
func TestScriptCompression(t *testing.T) {
t.Parallel()
tests := []struct {
name string
uncompressed []byte
compressed []byte
}{
{
name: "nil",
uncompressed: nil,
compressed: hexToBytes("06"),
},
{
name: "pay-to-pubkey-hash 1",
uncompressed: hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac"),
compressed: hexToBytes("001018853670f9f3b0582c5b9ee8ce93764ac32b93"),
},
{
name: "pay-to-pubkey-hash 2",
uncompressed: hexToBytes("76a914e34cce70c86373273efcc54ce7d2a491bb4a0e8488ac"),
compressed: hexToBytes("00e34cce70c86373273efcc54ce7d2a491bb4a0e84"),
},
{
name: "pay-to-script-hash 1",
uncompressed: hexToBytes("a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87"),
compressed: hexToBytes("01da1745e9b549bd0bfa1a569971c77eba30cd5a4b"),
},
{
name: "pay-to-script-hash 2",
uncompressed: hexToBytes("a914f815b036d9bbbce5e9f2a00abd1bf3dc91e9551087"),
compressed: hexToBytes("01f815b036d9bbbce5e9f2a00abd1bf3dc91e95510"),
},
{
name: "pay-to-pubkey compressed 0x02",
uncompressed: hexToBytes("2102192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4ac"),
compressed: hexToBytes("02192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4"),
},
{
name: "pay-to-pubkey compressed 0x03",
uncompressed: hexToBytes("2103b0bd634234abbb1ba1e986e884185c61cf43e001f9137f23c2c409273eb16e65ac"),
compressed: hexToBytes("03b0bd634234abbb1ba1e986e884185c61cf43e001f9137f23c2c409273eb16e65"),
},
{
name: "pay-to-pubkey uncompressed 0x04 even",
uncompressed: hexToBytes("4104192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453eac"),
compressed: hexToBytes("04192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453e"),
},
{
name: "pay-to-pubkey uncompressed 0x04 odd",
uncompressed: hexToBytes("410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac"),
compressed: hexToBytes("0411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3"),
},
{
name: "pay-to-pubkey invalid pubkey",
uncompressed: hexToBytes("3302aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac"),
compressed: hexToBytes("293302aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaac"),
},
{
name: "null data",
uncompressed: hexToBytes("6a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
compressed: hexToBytes("286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
},
{
name: "requires 2 size bytes - data push 200 bytes",
uncompressed: append(hexToBytes("4cc8"), bytes.Repeat([]byte{0x00}, 200)...),
// [0x80, 0x50] = 208 as a variable length quantity
// [0x4c, 0xc8] = OP_PUSHDATA1 200
compressed: append(hexToBytes("80504cc8"), bytes.Repeat([]byte{0x00}, 200)...),
},
}
for _, test := range tests {
// Ensure the function to calculate the serialized size without
// actually serializing the value is calculated properly.
gotSize := compressedScriptSize(test.uncompressed)
if gotSize != len(test.compressed) {
t.Errorf("compressedScriptSize (%s): did not get "+
"expected size - got %d, want %d", test.name,
gotSize, len(test.compressed))
continue
}
// Ensure the script compresses to the expected bytes.
gotCompressed := make([]byte, gotSize)
gotBytesWritten := PutCompressedScript(gotCompressed,
test.uncompressed)
if !bytes.Equal(gotCompressed, test.compressed) {
t.Errorf("PutCompressedScript (%s): did not get "+
"expected bytes - got %x, want %x", test.name,
gotCompressed, test.compressed)
continue
}
if gotBytesWritten != len(test.compressed) {
t.Errorf("PutCompressedScript (%s): did not get "+
"expected number of bytes written - got %d, "+
"want %d", test.name, gotBytesWritten,
len(test.compressed))
continue
}
// Ensure the compressed script size is properly decoded from
// the compressed script.
gotDecodedSize := DecodeCompressedScriptSize(test.compressed)
if gotDecodedSize != len(test.compressed) {
t.Errorf("DecodeCompressedScriptSize (%s): did not get "+
"expected size - got %d, want %d", test.name,
gotDecodedSize, len(test.compressed))
continue
}
// Ensure the script decompresses to the expected bytes.
gotDecompressed := DecompressScript(test.compressed)
if !bytes.Equal(gotDecompressed, test.uncompressed) {
t.Errorf("DecompressScript (%s): did not get expected "+
"bytes - got %x, want %x", test.name,
gotDecompressed, test.uncompressed)
continue
}
}
}
// TestScriptCompressionErrors ensures calling various functions related to
// script compression with incorrect data returns the expected results.
func TestScriptCompressionErrors(t *testing.T) {
t.Parallel()
// A nil script must result in a decoded size of 0.
if gotSize := DecodeCompressedScriptSize(nil); gotSize != 0 {
t.Fatalf("DecodeCompressedScriptSize with nil script did not "+
"return 0 - got %d", gotSize)
}
// A nil script must result in a nil decompressed script.
if gotScript := DecompressScript(nil); gotScript != nil {
t.Fatalf("DecompressScript with nil script did not return nil "+
"decompressed script - got %x", gotScript)
}
// A compressed script for a pay-to-pubkey (uncompressed) that results
// in an invalid pubkey must result in a nil decompressed script.
compressedScript := hexToBytes("04012d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b4")
if gotScript := DecompressScript(compressedScript); gotScript != nil {
t.Fatalf("DecompressScript with compressed pay-to-"+
"uncompressed-pubkey that is invalid did not return "+
"nil decompressed script - got %x", gotScript)
}
}
// TestAmountCompression ensures the domain-specific transaction output amount
// compression and decompression works as expected.
func TestAmountCompression(t *testing.T) {
t.Parallel()
tests := []struct {
name string
uncompressed uint64
compressed uint64
}{
{
name: "0 BTC (sometimes used in nulldata)",
uncompressed: 0,
compressed: 0,
},
{
name: "546 Satoshi (current network dust value)",
uncompressed: 546,
compressed: 4911,
},
{
name: "0.00001 BTC (typical transaction fee)",
uncompressed: 1000,
compressed: 4,
},
{
name: "0.0001 BTC (typical transaction fee)",
uncompressed: 10000,
compressed: 5,
},
{
name: "0.12345678 BTC",
uncompressed: 12345678,
compressed: 111111101,
},
{
name: "0.5 BTC",
uncompressed: 50000000,
compressed: 48,
},
{
name: "1 BTC",
uncompressed: 100000000,
compressed: 9,
},
{
name: "5 BTC",
uncompressed: 500000000,
compressed: 49,
},
{
name: "21000000 BTC (max minted coins)",
uncompressed: 2100000000000000,
compressed: 21000000,
},
}
for _, test := range tests {
// Ensure the amount compresses to the expected value.
gotCompressed := CompressTxOutAmount(test.uncompressed)
if gotCompressed != test.compressed {
t.Errorf("CompressTxOutAmount (%s): did not get "+
"expected value - got %d, want %d", test.name,
gotCompressed, test.compressed)
continue
}
// Ensure the value decompresses to the expected value.
gotDecompressed := DecompressTxOutAmount(test.compressed)
if gotDecompressed != test.uncompressed {
t.Errorf("deCompressTxOutAmount (%s): did not get "+
"expected value - got %d, want %d", test.name,
gotDecompressed, test.uncompressed)
continue
}
}
}
// TestCompressedTxOut ensures the transaction output serialization and
// deserialization works as expected.
func TestCompressedTxOut(t *testing.T) {
t.Parallel()
tests := []struct {
name string
amount uint64
pkScript []byte
compressed []byte
}{
{
name: "nulldata with 0 BTC",
amount: 0,
pkScript: hexToBytes("6a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
compressed: hexToBytes("00286a200102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"),
},
{
name: "pay-to-pubkey-hash dust",
amount: 546,
pkScript: hexToBytes("76a9141018853670f9f3b0582c5b9ee8ce93764ac32b9388ac"),
compressed: hexToBytes("a52f001018853670f9f3b0582c5b9ee8ce93764ac32b93"),
},
{
name: "pay-to-pubkey uncompressed 1 BTC",
amount: 100000000,
pkScript: hexToBytes("4104192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453eac"),
compressed: hexToBytes("0904192d74d0cb94344c9569c2e77901573d8d7903c3ebec3a957724895dca52c6b40d45264838c0bd96852662ce6a847b197376830160c6d2eb5e6a4c44d33f453e"),
},
}
for _, test := range tests {
// Ensure the function to calculate the serialized size without
// actually serializing the txout is calculated properly.
gotSize := compressedTxOutSize(test.amount, test.pkScript)
if gotSize != len(test.compressed) {
t.Errorf("compressedTxOutSize (%s): did not get "+
"expected size - got %d, want %d", test.name,
gotSize, len(test.compressed))
continue
}
// Ensure the txout compresses to the expected value.
gotCompressed := make([]byte, gotSize)
gotBytesWritten := putCompressedTxOut(gotCompressed,
test.amount, test.pkScript)
if !bytes.Equal(gotCompressed, test.compressed) {
t.Errorf("compressTxOut (%s): did not get expected "+
"bytes - got %x, want %x", test.name,
gotCompressed, test.compressed)
continue
}
if gotBytesWritten != len(test.compressed) {
t.Errorf("compressTxOut (%s): did not get expected "+
"number of bytes written - got %d, want %d",
test.name, gotBytesWritten,
len(test.compressed))
continue
}
// Ensure the serialized bytes are decoded back to the expected
// uncompressed values.
gotAmount, gotScript, gotBytesRead, err := decodeCompressedTxOut(
test.compressed)
if err != nil {
t.Errorf("decodeCompressedTxOut (%s): unexpected "+
"error: %v", test.name, err)
continue
}
if gotAmount != test.amount {
t.Errorf("decodeCompressedTxOut (%s): did not get "+
"expected amount - got %d, want %d",
test.name, gotAmount, test.amount)
continue
}
if !bytes.Equal(gotScript, test.pkScript) {
t.Errorf("decodeCompressedTxOut (%s): did not get "+
"expected script - got %x, want %x",
test.name, gotScript, test.pkScript)
continue
}
if gotBytesRead != len(test.compressed) {
t.Errorf("decodeCompressedTxOut (%s): did not get "+
"expected number of bytes read - got %d, want %d",
test.name, gotBytesRead, len(test.compressed))
continue
}
}
}
// TestTxOutCompressionErrors ensures calling various functions related to
// txout compression with incorrect data returns the expected results.
func TestTxOutCompressionErrors(t *testing.T) {
t.Parallel()
// A compressed txout with missing compressed script must error.
compressedTxOut := hexToBytes("00")
_, _, _, err := decodeCompressedTxOut(compressedTxOut)
if err == nil {
t.Fatalf("decodeCompressedTxOut with missing compressed script "+
"did not return expected error type - got %T, want "+
"errDeserialize", err)
}
// A compressed txout with short compressed script must error.
compressedTxOut = hexToBytes("0010")
_, _, _, err = decodeCompressedTxOut(compressedTxOut)
if err == nil {
t.Fatalf("decodeCompressedTxOut with short compressed script "+
"did not return expected error type - got %T, want "+
"errDeserialize", err)
}
}

602
utils/script/opcode.go Normal file
View File

@@ -0,0 +1,602 @@
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package script
// An opcode defines the information related to a txscript opcode: its byte
// value, its human-readable name, and the number of bytes the opcode and any
// associated data occupy (negative lengths denote the OP_PUSHDATA variants,
// whose data length is encoded in the following 1, 2, or 4 bytes).
type opcode struct {
value byte
name string
length int
}
// These constants are the values of the official opcodes used on the btc wiki,
// in bitcoin core and in most if not all other references and software related
// to handling BTC scripts.
const (
OP_0 = 0x00 // 0
OP_FALSE = 0x00 // 0 - AKA OP_0
OP_DATA_1 = 0x01 // 1
OP_DATA_2 = 0x02 // 2
OP_DATA_3 = 0x03 // 3
OP_DATA_4 = 0x04 // 4
OP_DATA_5 = 0x05 // 5
OP_DATA_6 = 0x06 // 6
OP_DATA_7 = 0x07 // 7
OP_DATA_8 = 0x08 // 8
OP_DATA_9 = 0x09 // 9
OP_DATA_10 = 0x0a // 10
OP_DATA_11 = 0x0b // 11
OP_DATA_12 = 0x0c // 12
OP_DATA_13 = 0x0d // 13
OP_DATA_14 = 0x0e // 14
OP_DATA_15 = 0x0f // 15
OP_DATA_16 = 0x10 // 16
OP_DATA_17 = 0x11 // 17
OP_DATA_18 = 0x12 // 18
OP_DATA_19 = 0x13 // 19
OP_DATA_20 = 0x14 // 20
OP_DATA_21 = 0x15 // 21
OP_DATA_22 = 0x16 // 22
OP_DATA_23 = 0x17 // 23
OP_DATA_24 = 0x18 // 24
OP_DATA_25 = 0x19 // 25
OP_DATA_26 = 0x1a // 26
OP_DATA_27 = 0x1b // 27
OP_DATA_28 = 0x1c // 28
OP_DATA_29 = 0x1d // 29
OP_DATA_30 = 0x1e // 30
OP_DATA_31 = 0x1f // 31
OP_DATA_32 = 0x20 // 32
OP_DATA_33 = 0x21 // 33
OP_DATA_34 = 0x22 // 34
OP_DATA_35 = 0x23 // 35
OP_DATA_36 = 0x24 // 36
OP_DATA_37 = 0x25 // 37
OP_DATA_38 = 0x26 // 38
OP_DATA_39 = 0x27 // 39
OP_DATA_40 = 0x28 // 40
OP_DATA_41 = 0x29 // 41
OP_DATA_42 = 0x2a // 42
OP_DATA_43 = 0x2b // 43
OP_DATA_44 = 0x2c // 44
OP_DATA_45 = 0x2d // 45
OP_DATA_46 = 0x2e // 46
OP_DATA_47 = 0x2f // 47
OP_DATA_48 = 0x30 // 48
OP_DATA_49 = 0x31 // 49
OP_DATA_50 = 0x32 // 50
OP_DATA_51 = 0x33 // 51
OP_DATA_52 = 0x34 // 52
OP_DATA_53 = 0x35 // 53
OP_DATA_54 = 0x36 // 54
OP_DATA_55 = 0x37 // 55
OP_DATA_56 = 0x38 // 56
OP_DATA_57 = 0x39 // 57
OP_DATA_58 = 0x3a // 58
OP_DATA_59 = 0x3b // 59
OP_DATA_60 = 0x3c // 60
OP_DATA_61 = 0x3d // 61
OP_DATA_62 = 0x3e // 62
OP_DATA_63 = 0x3f // 63
OP_DATA_64 = 0x40 // 64
OP_DATA_65 = 0x41 // 65
OP_DATA_66 = 0x42 // 66
OP_DATA_67 = 0x43 // 67
OP_DATA_68 = 0x44 // 68
OP_DATA_69 = 0x45 // 69
OP_DATA_70 = 0x46 // 70
OP_DATA_71 = 0x47 // 71
OP_DATA_72 = 0x48 // 72
OP_DATA_73 = 0x49 // 73
OP_DATA_74 = 0x4a // 74
OP_DATA_75 = 0x4b // 75
OP_PUSHDATA1 = 0x4c // 76
OP_PUSHDATA2 = 0x4d // 77
OP_PUSHDATA4 = 0x4e // 78
OP_1NEGATE = 0x4f // 79
OP_RESERVED = 0x50 // 80
OP_1 = 0x51 // 81 - AKA OP_TRUE
OP_TRUE = 0x51 // 81
OP_2 = 0x52 // 82
OP_3 = 0x53 // 83
OP_4 = 0x54 // 84
OP_5 = 0x55 // 85
OP_6 = 0x56 // 86
OP_7 = 0x57 // 87
OP_8 = 0x58 // 88
OP_9 = 0x59 // 89
OP_10 = 0x5a // 90
OP_11 = 0x5b // 91
OP_12 = 0x5c // 92
OP_13 = 0x5d // 93
OP_14 = 0x5e // 94
OP_15 = 0x5f // 95
OP_16 = 0x60 // 96
OP_NOP = 0x61 // 97
OP_VER = 0x62 // 98
OP_IF = 0x63 // 99
OP_NOTIF = 0x64 // 100
OP_VERIF = 0x65 // 101
OP_VERNOTIF = 0x66 // 102
OP_ELSE = 0x67 // 103
OP_ENDIF = 0x68 // 104
OP_VERIFY = 0x69 // 105
OP_RETURN = 0x6a // 106
OP_TOALTSTACK = 0x6b // 107
OP_FROMALTSTACK = 0x6c // 108
OP_2DROP = 0x6d // 109
OP_2DUP = 0x6e // 110
OP_3DUP = 0x6f // 111
OP_2OVER = 0x70 // 112
OP_2ROT = 0x71 // 113
OP_2SWAP = 0x72 // 114
OP_IFDUP = 0x73 // 115
OP_DEPTH = 0x74 // 116
OP_DROP = 0x75 // 117
OP_DUP = 0x76 // 118
OP_NIP = 0x77 // 119
OP_OVER = 0x78 // 120
OP_PICK = 0x79 // 121
OP_ROLL = 0x7a // 122
OP_ROT = 0x7b // 123
OP_SWAP = 0x7c // 124
OP_TUCK = 0x7d // 125
OP_CAT = 0x7e // 126
OP_SUBSTR = 0x7f // 127
OP_LEFT = 0x80 // 128
OP_RIGHT = 0x81 // 129
OP_SIZE = 0x82 // 130
OP_INVERT = 0x83 // 131
OP_AND = 0x84 // 132
OP_OR = 0x85 // 133
OP_XOR = 0x86 // 134
OP_EQUAL = 0x87 // 135
OP_EQUALVERIFY = 0x88 // 136
OP_RESERVED1 = 0x89 // 137
OP_RESERVED2 = 0x8a // 138
OP_1ADD = 0x8b // 139
OP_1SUB = 0x8c // 140
OP_2MUL = 0x8d // 141
OP_2DIV = 0x8e // 142
OP_NEGATE = 0x8f // 143
OP_ABS = 0x90 // 144
OP_NOT = 0x91 // 145
OP_0NOTEQUAL = 0x92 // 146
OP_ADD = 0x93 // 147
OP_SUB = 0x94 // 148
OP_MUL = 0x95 // 149
OP_DIV = 0x96 // 150
OP_MOD = 0x97 // 151
OP_LSHIFT = 0x98 // 152
OP_RSHIFT = 0x99 // 153
OP_BOOLAND = 0x9a // 154
OP_BOOLOR = 0x9b // 155
OP_NUMEQUAL = 0x9c // 156
OP_NUMEQUALVERIFY = 0x9d // 157
OP_NUMNOTEQUAL = 0x9e // 158
OP_LESSTHAN = 0x9f // 159
OP_GREATERTHAN = 0xa0 // 160
OP_LESSTHANOREQUAL = 0xa1 // 161
OP_GREATERTHANOREQUAL = 0xa2 // 162
OP_MIN = 0xa3 // 163
OP_MAX = 0xa4 // 164
OP_WITHIN = 0xa5 // 165
OP_RIPEMD160 = 0xa6 // 166
OP_SHA1 = 0xa7 // 167
OP_SHA256 = 0xa8 // 168
OP_HASH160 = 0xa9 // 169
OP_HASH256 = 0xaa // 170
OP_CODESEPARATOR = 0xab // 171
OP_CHECKSIG = 0xac // 172
OP_CHECKSIGVERIFY = 0xad // 173
OP_CHECKMULTISIG = 0xae // 174
OP_CHECKMULTISIGVERIFY = 0xaf // 175
OP_NOP1 = 0xb0 // 176
OP_NOP2 = 0xb1 // 177
OP_CHECKLOCKTIMEVERIFY = 0xb1 // 177 - AKA OP_NOP2
OP_NOP3 = 0xb2 // 178
OP_CHECKSEQUENCEVERIFY = 0xb2 // 178 - AKA OP_NOP3
OP_NOP4 = 0xb3 // 179
OP_NOP5 = 0xb4 // 180
OP_NOP6 = 0xb5 // 181
OP_NOP7 = 0xb6 // 182
OP_NOP8 = 0xb7 // 183
OP_NOP9 = 0xb8 // 184
OP_NOP10 = 0xb9 // 185
OP_UNKNOWN186 = 0xba // 186
OP_UNKNOWN187 = 0xbb // 187
OP_UNKNOWN188 = 0xbc // 188
OP_UNKNOWN189 = 0xbd // 189
OP_UNKNOWN190 = 0xbe // 190
OP_UNKNOWN191 = 0xbf // 191
OP_UNKNOWN192 = 0xc0 // 192
OP_UNKNOWN193 = 0xc1 // 193
OP_UNKNOWN194 = 0xc2 // 194
OP_UNKNOWN195 = 0xc3 // 195
OP_UNKNOWN196 = 0xc4 // 196
OP_UNKNOWN197 = 0xc5 // 197
OP_UNKNOWN198 = 0xc6 // 198
OP_UNKNOWN199 = 0xc7 // 199
OP_UNKNOWN200 = 0xc8 // 200
OP_UNKNOWN201 = 0xc9 // 201
OP_UNKNOWN202 = 0xca // 202
OP_UNKNOWN203 = 0xcb // 203
OP_UNKNOWN204 = 0xcc // 204
OP_UNKNOWN205 = 0xcd // 205
OP_UNKNOWN206 = 0xce // 206
OP_UNKNOWN207 = 0xcf // 207
OP_UNKNOWN208 = 0xd0 // 208
OP_UNKNOWN209 = 0xd1 // 209
OP_UNKNOWN210 = 0xd2 // 210
OP_UNKNOWN211 = 0xd3 // 211
OP_UNKNOWN212 = 0xd4 // 212
OP_UNKNOWN213 = 0xd5 // 213
OP_UNKNOWN214 = 0xd6 // 214
OP_UNKNOWN215 = 0xd7 // 215
OP_UNKNOWN216 = 0xd8 // 216
OP_UNKNOWN217 = 0xd9 // 217
OP_UNKNOWN218 = 0xda // 218
OP_UNKNOWN219 = 0xdb // 219
OP_UNKNOWN220 = 0xdc // 220
OP_UNKNOWN221 = 0xdd // 221
OP_UNKNOWN222 = 0xde // 222
OP_UNKNOWN223 = 0xdf // 223
OP_UNKNOWN224 = 0xe0 // 224
OP_UNKNOWN225 = 0xe1 // 225
OP_UNKNOWN226 = 0xe2 // 226
OP_UNKNOWN227 = 0xe3 // 227
OP_UNKNOWN228 = 0xe4 // 228
OP_UNKNOWN229 = 0xe5 // 229
OP_UNKNOWN230 = 0xe6 // 230
OP_UNKNOWN231 = 0xe7 // 231
OP_UNKNOWN232 = 0xe8 // 232
OP_UNKNOWN233 = 0xe9 // 233
OP_UNKNOWN234 = 0xea // 234
OP_UNKNOWN235 = 0xeb // 235
OP_UNKNOWN236 = 0xec // 236
OP_UNKNOWN237 = 0xed // 237
OP_UNKNOWN238 = 0xee // 238
OP_UNKNOWN239 = 0xef // 239
OP_UNKNOWN240 = 0xf0 // 240
OP_UNKNOWN241 = 0xf1 // 241
OP_UNKNOWN242 = 0xf2 // 242
OP_UNKNOWN243 = 0xf3 // 243
OP_UNKNOWN244 = 0xf4 // 244
OP_UNKNOWN245 = 0xf5 // 245
OP_UNKNOWN246 = 0xf6 // 246
OP_UNKNOWN247 = 0xf7 // 247
OP_UNKNOWN248 = 0xf8 // 248
OP_UNKNOWN249 = 0xf9 // 249
OP_SMALLINTEGER = 0xfa // 250 - bitcoin core internal
OP_PUBKEYS = 0xfb // 251 - bitcoin core internal
OP_UNKNOWN252 = 0xfc // 252
OP_PUBKEYHASH = 0xfd // 253 - bitcoin core internal
OP_PUBKEY = 0xfe // 254 - bitcoin core internal
OP_INVALIDOPCODE = 0xff // 255 - bitcoin core internal
)
// Conditional execution constants.
const (
OpCondFalse = 0
OpCondTrue = 1
OpCondSkip = 2
)
// opcodeArray holds details about all possible opcodes such as how many bytes
// the opcode and any associated data should take and its human-readable name.
var opcodeArray = [256]opcode{
// Data push opcodes.
OP_FALSE: {OP_FALSE, "OP_0", 1},
OP_DATA_1: {OP_DATA_1, "OP_DATA_1", 2},
OP_DATA_2: {OP_DATA_2, "OP_DATA_2", 3},
OP_DATA_3: {OP_DATA_3, "OP_DATA_3", 4},
OP_DATA_4: {OP_DATA_4, "OP_DATA_4", 5},
OP_DATA_5: {OP_DATA_5, "OP_DATA_5", 6},
OP_DATA_6: {OP_DATA_6, "OP_DATA_6", 7},
OP_DATA_7: {OP_DATA_7, "OP_DATA_7", 8},
OP_DATA_8: {OP_DATA_8, "OP_DATA_8", 9},
OP_DATA_9: {OP_DATA_9, "OP_DATA_9", 10},
OP_DATA_10: {OP_DATA_10, "OP_DATA_10", 11},
OP_DATA_11: {OP_DATA_11, "OP_DATA_11", 12},
OP_DATA_12: {OP_DATA_12, "OP_DATA_12", 13},
OP_DATA_13: {OP_DATA_13, "OP_DATA_13", 14},
OP_DATA_14: {OP_DATA_14, "OP_DATA_14", 15},
OP_DATA_15: {OP_DATA_15, "OP_DATA_15", 16},
OP_DATA_16: {OP_DATA_16, "OP_DATA_16", 17},
OP_DATA_17: {OP_DATA_17, "OP_DATA_17", 18},
OP_DATA_18: {OP_DATA_18, "OP_DATA_18", 19},
OP_DATA_19: {OP_DATA_19, "OP_DATA_19", 20},
OP_DATA_20: {OP_DATA_20, "OP_DATA_20", 21},
OP_DATA_21: {OP_DATA_21, "OP_DATA_21", 22},
OP_DATA_22: {OP_DATA_22, "OP_DATA_22", 23},
OP_DATA_23: {OP_DATA_23, "OP_DATA_23", 24},
OP_DATA_24: {OP_DATA_24, "OP_DATA_24", 25},
OP_DATA_25: {OP_DATA_25, "OP_DATA_25", 26},
OP_DATA_26: {OP_DATA_26, "OP_DATA_26", 27},
OP_DATA_27: {OP_DATA_27, "OP_DATA_27", 28},
OP_DATA_28: {OP_DATA_28, "OP_DATA_28", 29},
OP_DATA_29: {OP_DATA_29, "OP_DATA_29", 30},
OP_DATA_30: {OP_DATA_30, "OP_DATA_30", 31},
OP_DATA_31: {OP_DATA_31, "OP_DATA_31", 32},
OP_DATA_32: {OP_DATA_32, "OP_DATA_32", 33},
OP_DATA_33: {OP_DATA_33, "OP_DATA_33", 34},
OP_DATA_34: {OP_DATA_34, "OP_DATA_34", 35},
OP_DATA_35: {OP_DATA_35, "OP_DATA_35", 36},
OP_DATA_36: {OP_DATA_36, "OP_DATA_36", 37},
OP_DATA_37: {OP_DATA_37, "OP_DATA_37", 38},
OP_DATA_38: {OP_DATA_38, "OP_DATA_38", 39},
OP_DATA_39: {OP_DATA_39, "OP_DATA_39", 40},
OP_DATA_40: {OP_DATA_40, "OP_DATA_40", 41},
OP_DATA_41: {OP_DATA_41, "OP_DATA_41", 42},
OP_DATA_42: {OP_DATA_42, "OP_DATA_42", 43},
OP_DATA_43: {OP_DATA_43, "OP_DATA_43", 44},
OP_DATA_44: {OP_DATA_44, "OP_DATA_44", 45},
OP_DATA_45: {OP_DATA_45, "OP_DATA_45", 46},
OP_DATA_46: {OP_DATA_46, "OP_DATA_46", 47},
OP_DATA_47: {OP_DATA_47, "OP_DATA_47", 48},
OP_DATA_48: {OP_DATA_48, "OP_DATA_48", 49},
OP_DATA_49: {OP_DATA_49, "OP_DATA_49", 50},
OP_DATA_50: {OP_DATA_50, "OP_DATA_50", 51},
OP_DATA_51: {OP_DATA_51, "OP_DATA_51", 52},
OP_DATA_52: {OP_DATA_52, "OP_DATA_52", 53},
OP_DATA_53: {OP_DATA_53, "OP_DATA_53", 54},
OP_DATA_54: {OP_DATA_54, "OP_DATA_54", 55},
OP_DATA_55: {OP_DATA_55, "OP_DATA_55", 56},
OP_DATA_56: {OP_DATA_56, "OP_DATA_56", 57},
OP_DATA_57: {OP_DATA_57, "OP_DATA_57", 58},
OP_DATA_58: {OP_DATA_58, "OP_DATA_58", 59},
OP_DATA_59: {OP_DATA_59, "OP_DATA_59", 60},
OP_DATA_60: {OP_DATA_60, "OP_DATA_60", 61},
OP_DATA_61: {OP_DATA_61, "OP_DATA_61", 62},
OP_DATA_62: {OP_DATA_62, "OP_DATA_62", 63},
OP_DATA_63: {OP_DATA_63, "OP_DATA_63", 64},
OP_DATA_64: {OP_DATA_64, "OP_DATA_64", 65},
OP_DATA_65: {OP_DATA_65, "OP_DATA_65", 66},
OP_DATA_66: {OP_DATA_66, "OP_DATA_66", 67},
OP_DATA_67: {OP_DATA_67, "OP_DATA_67", 68},
OP_DATA_68: {OP_DATA_68, "OP_DATA_68", 69},
OP_DATA_69: {OP_DATA_69, "OP_DATA_69", 70},
OP_DATA_70: {OP_DATA_70, "OP_DATA_70", 71},
OP_DATA_71: {OP_DATA_71, "OP_DATA_71", 72},
OP_DATA_72: {OP_DATA_72, "OP_DATA_72", 73},
OP_DATA_73: {OP_DATA_73, "OP_DATA_73", 74},
OP_DATA_74: {OP_DATA_74, "OP_DATA_74", 75},
OP_DATA_75: {OP_DATA_75, "OP_DATA_75", 76},
OP_PUSHDATA1: {OP_PUSHDATA1, "OP_PUSHDATA1", -1},
OP_PUSHDATA2: {OP_PUSHDATA2, "OP_PUSHDATA2", -2},
OP_PUSHDATA4: {OP_PUSHDATA4, "OP_PUSHDATA4", -4},
OP_1NEGATE: {OP_1NEGATE, "OP_1NEGATE", 1},
OP_RESERVED: {OP_RESERVED, "OP_RESERVED", 1},
OP_TRUE: {OP_TRUE, "OP_1", 1},
OP_2: {OP_2, "OP_2", 1},
OP_3: {OP_3, "OP_3", 1},
OP_4: {OP_4, "OP_4", 1},
OP_5: {OP_5, "OP_5", 1},
OP_6: {OP_6, "OP_6", 1},
OP_7: {OP_7, "OP_7", 1},
OP_8: {OP_8, "OP_8", 1},
OP_9: {OP_9, "OP_9", 1},
OP_10: {OP_10, "OP_10", 1},
OP_11: {OP_11, "OP_11", 1},
OP_12: {OP_12, "OP_12", 1},
OP_13: {OP_13, "OP_13", 1},
OP_14: {OP_14, "OP_14", 1},
OP_15: {OP_15, "OP_15", 1},
OP_16: {OP_16, "OP_16", 1},
// Control opcodes.
OP_NOP: {OP_NOP, "OP_NOP", 1},
OP_VER: {OP_VER, "OP_VER", 1},
OP_IF: {OP_IF, "OP_IF", 1},
OP_NOTIF: {OP_NOTIF, "OP_NOTIF", 1},
OP_VERIF: {OP_VERIF, "OP_VERIF", 1},
OP_VERNOTIF: {OP_VERNOTIF, "OP_VERNOTIF", 1},
OP_ELSE: {OP_ELSE, "OP_ELSE", 1},
OP_ENDIF: {OP_ENDIF, "OP_ENDIF", 1},
OP_VERIFY: {OP_VERIFY, "OP_VERIFY", 1},
OP_RETURN: {OP_RETURN, "OP_RETURN", 1},
OP_CHECKLOCKTIMEVERIFY: {OP_CHECKLOCKTIMEVERIFY, "OP_CHECKLOCKTIMEVERIFY", 1},
OP_CHECKSEQUENCEVERIFY: {OP_CHECKSEQUENCEVERIFY, "OP_CHECKSEQUENCEVERIFY", 1},
// Stack opcodes.
OP_TOALTSTACK: {OP_TOALTSTACK, "OP_TOALTSTACK", 1},
OP_FROMALTSTACK: {OP_FROMALTSTACK, "OP_FROMALTSTACK", 1},
OP_2DROP: {OP_2DROP, "OP_2DROP", 1},
OP_2DUP: {OP_2DUP, "OP_2DUP", 1},
OP_3DUP: {OP_3DUP, "OP_3DUP", 1},
OP_2OVER: {OP_2OVER, "OP_2OVER", 1},
OP_2ROT: {OP_2ROT, "OP_2ROT", 1},
OP_2SWAP: {OP_2SWAP, "OP_2SWAP", 1},
OP_IFDUP: {OP_IFDUP, "OP_IFDUP", 1},
OP_DEPTH: {OP_DEPTH, "OP_DEPTH", 1},
OP_DROP: {OP_DROP, "OP_DROP", 1},
OP_DUP: {OP_DUP, "OP_DUP", 1},
OP_NIP: {OP_NIP, "OP_NIP", 1},
OP_OVER: {OP_OVER, "OP_OVER", 1},
OP_PICK: {OP_PICK, "OP_PICK", 1},
OP_ROLL: {OP_ROLL, "OP_ROLL", 1},
OP_ROT: {OP_ROT, "OP_ROT", 1},
OP_SWAP: {OP_SWAP, "OP_SWAP", 1},
OP_TUCK: {OP_TUCK, "OP_TUCK", 1},
// Splice opcodes.
OP_CAT: {OP_CAT, "OP_CAT", 1},
OP_SUBSTR: {OP_SUBSTR, "OP_SUBSTR", 1},
OP_LEFT: {OP_LEFT, "OP_LEFT", 1},
OP_RIGHT: {OP_RIGHT, "OP_RIGHT", 1},
OP_SIZE: {OP_SIZE, "OP_SIZE", 1},
// Bitwise logic opcodes.
OP_INVERT: {OP_INVERT, "OP_INVERT", 1},
OP_AND: {OP_AND, "OP_AND", 1},
OP_OR: {OP_OR, "OP_OR", 1},
OP_XOR: {OP_XOR, "OP_XOR", 1},
OP_EQUAL: {OP_EQUAL, "OP_EQUAL", 1},
OP_EQUALVERIFY: {OP_EQUALVERIFY, "OP_EQUALVERIFY", 1},
OP_RESERVED1: {OP_RESERVED1, "OP_RESERVED1", 1},
OP_RESERVED2: {OP_RESERVED2, "OP_RESERVED2", 1},
// Numeric related opcodes.
OP_1ADD: {OP_1ADD, "OP_1ADD", 1},
OP_1SUB: {OP_1SUB, "OP_1SUB", 1},
OP_2MUL: {OP_2MUL, "OP_2MUL", 1},
OP_2DIV: {OP_2DIV, "OP_2DIV", 1},
OP_NEGATE: {OP_NEGATE, "OP_NEGATE", 1},
OP_ABS: {OP_ABS, "OP_ABS", 1},
OP_NOT: {OP_NOT, "OP_NOT", 1},
OP_0NOTEQUAL: {OP_0NOTEQUAL, "OP_0NOTEQUAL", 1},
OP_ADD: {OP_ADD, "OP_ADD", 1},
OP_SUB: {OP_SUB, "OP_SUB", 1},
OP_MUL: {OP_MUL, "OP_MUL", 1},
OP_DIV: {OP_DIV, "OP_DIV", 1},
OP_MOD: {OP_MOD, "OP_MOD", 1},
OP_LSHIFT: {OP_LSHIFT, "OP_LSHIFT", 1},
OP_RSHIFT: {OP_RSHIFT, "OP_RSHIFT", 1},
OP_BOOLAND: {OP_BOOLAND, "OP_BOOLAND", 1},
OP_BOOLOR: {OP_BOOLOR, "OP_BOOLOR", 1},
OP_NUMEQUAL: {OP_NUMEQUAL, "OP_NUMEQUAL", 1},
OP_NUMEQUALVERIFY: {OP_NUMEQUALVERIFY, "OP_NUMEQUALVERIFY", 1},
OP_NUMNOTEQUAL: {OP_NUMNOTEQUAL, "OP_NUMNOTEQUAL", 1},
OP_LESSTHAN: {OP_LESSTHAN, "OP_LESSTHAN", 1},
OP_GREATERTHAN: {OP_GREATERTHAN, "OP_GREATERTHAN", 1},
OP_LESSTHANOREQUAL: {OP_LESSTHANOREQUAL, "OP_LESSTHANOREQUAL", 1},
OP_GREATERTHANOREQUAL: {OP_GREATERTHANOREQUAL, "OP_GREATERTHANOREQUAL", 1},
OP_MIN: {OP_MIN, "OP_MIN", 1},
OP_MAX: {OP_MAX, "OP_MAX", 1},
OP_WITHIN: {OP_WITHIN, "OP_WITHIN", 1},
// Crypto opcodes.
OP_RIPEMD160: {OP_RIPEMD160, "OP_RIPEMD160", 1},
OP_SHA1: {OP_SHA1, "OP_SHA1", 1},
OP_SHA256: {OP_SHA256, "OP_SHA256", 1},
OP_HASH160: {OP_HASH160, "OP_HASH160", 1},
OP_HASH256: {OP_HASH256, "OP_HASH256", 1},
OP_CODESEPARATOR: {OP_CODESEPARATOR, "OP_CODESEPARATOR", 1},
OP_CHECKSIG: {OP_CHECKSIG, "OP_CHECKSIG", 1},
OP_CHECKSIGVERIFY: {OP_CHECKSIGVERIFY, "OP_CHECKSIGVERIFY", 1},
OP_CHECKMULTISIG: {OP_CHECKMULTISIG, "OP_CHECKMULTISIG", 1},
OP_CHECKMULTISIGVERIFY: {OP_CHECKMULTISIGVERIFY, "OP_CHECKMULTISIGVERIFY", 1},
// Reserved opcodes.
OP_NOP1: {OP_NOP1, "OP_NOP1", 1},
OP_NOP4: {OP_NOP4, "OP_NOP4", 1},
OP_NOP5: {OP_NOP5, "OP_NOP5", 1},
OP_NOP6: {OP_NOP6, "OP_NOP6", 1},
OP_NOP7: {OP_NOP7, "OP_NOP7", 1},
OP_NOP8: {OP_NOP8, "OP_NOP8", 1},
OP_NOP9: {OP_NOP9, "OP_NOP9", 1},
OP_NOP10: {OP_NOP10, "OP_NOP10", 1},
// Undefined opcodes.
OP_UNKNOWN186: {OP_UNKNOWN186, "OP_UNKNOWN186", 1},
OP_UNKNOWN187: {OP_UNKNOWN187, "OP_UNKNOWN187", 1},
OP_UNKNOWN188: {OP_UNKNOWN188, "OP_UNKNOWN188", 1},
OP_UNKNOWN189: {OP_UNKNOWN189, "OP_UNKNOWN189", 1},
OP_UNKNOWN190: {OP_UNKNOWN190, "OP_UNKNOWN190", 1},
OP_UNKNOWN191: {OP_UNKNOWN191, "OP_UNKNOWN191", 1},
OP_UNKNOWN192: {OP_UNKNOWN192, "OP_UNKNOWN192", 1},
OP_UNKNOWN193: {OP_UNKNOWN193, "OP_UNKNOWN193", 1},
OP_UNKNOWN194: {OP_UNKNOWN194, "OP_UNKNOWN194", 1},
OP_UNKNOWN195: {OP_UNKNOWN195, "OP_UNKNOWN195", 1},
OP_UNKNOWN196: {OP_UNKNOWN196, "OP_UNKNOWN196", 1},
OP_UNKNOWN197: {OP_UNKNOWN197, "OP_UNKNOWN197", 1},
OP_UNKNOWN198: {OP_UNKNOWN198, "OP_UNKNOWN198", 1},
OP_UNKNOWN199: {OP_UNKNOWN199, "OP_UNKNOWN199", 1},
OP_UNKNOWN200: {OP_UNKNOWN200, "OP_UNKNOWN200", 1},
OP_UNKNOWN201: {OP_UNKNOWN201, "OP_UNKNOWN201", 1},
OP_UNKNOWN202: {OP_UNKNOWN202, "OP_UNKNOWN202", 1},
OP_UNKNOWN203: {OP_UNKNOWN203, "OP_UNKNOWN203", 1},
OP_UNKNOWN204: {OP_UNKNOWN204, "OP_UNKNOWN204", 1},
OP_UNKNOWN205: {OP_UNKNOWN205, "OP_UNKNOWN205", 1},
OP_UNKNOWN206: {OP_UNKNOWN206, "OP_UNKNOWN206", 1},
OP_UNKNOWN207: {OP_UNKNOWN207, "OP_UNKNOWN207", 1},
OP_UNKNOWN208: {OP_UNKNOWN208, "OP_UNKNOWN208", 1},
OP_UNKNOWN209: {OP_UNKNOWN209, "OP_UNKNOWN209", 1},
OP_UNKNOWN210: {OP_UNKNOWN210, "OP_UNKNOWN210", 1},
OP_UNKNOWN211: {OP_UNKNOWN211, "OP_UNKNOWN211", 1},
OP_UNKNOWN212: {OP_UNKNOWN212, "OP_UNKNOWN212", 1},
OP_UNKNOWN213: {OP_UNKNOWN213, "OP_UNKNOWN213", 1},
OP_UNKNOWN214: {OP_UNKNOWN214, "OP_UNKNOWN214", 1},
OP_UNKNOWN215: {OP_UNKNOWN215, "OP_UNKNOWN215", 1},
OP_UNKNOWN216: {OP_UNKNOWN216, "OP_UNKNOWN216", 1},
OP_UNKNOWN217: {OP_UNKNOWN217, "OP_UNKNOWN217", 1},
OP_UNKNOWN218: {OP_UNKNOWN218, "OP_UNKNOWN218", 1},
OP_UNKNOWN219: {OP_UNKNOWN219, "OP_UNKNOWN219", 1},
OP_UNKNOWN220: {OP_UNKNOWN220, "OP_UNKNOWN220", 1},
OP_UNKNOWN221: {OP_UNKNOWN221, "OP_UNKNOWN221", 1},
OP_UNKNOWN222: {OP_UNKNOWN222, "OP_UNKNOWN222", 1},
OP_UNKNOWN223: {OP_UNKNOWN223, "OP_UNKNOWN223", 1},
OP_UNKNOWN224: {OP_UNKNOWN224, "OP_UNKNOWN224", 1},
OP_UNKNOWN225: {OP_UNKNOWN225, "OP_UNKNOWN225", 1},
OP_UNKNOWN226: {OP_UNKNOWN226, "OP_UNKNOWN226", 1},
OP_UNKNOWN227: {OP_UNKNOWN227, "OP_UNKNOWN227", 1},
OP_UNKNOWN228: {OP_UNKNOWN228, "OP_UNKNOWN228", 1},
OP_UNKNOWN229: {OP_UNKNOWN229, "OP_UNKNOWN229", 1},
OP_UNKNOWN230: {OP_UNKNOWN230, "OP_UNKNOWN230", 1},
OP_UNKNOWN231: {OP_UNKNOWN231, "OP_UNKNOWN231", 1},
OP_UNKNOWN232: {OP_UNKNOWN232, "OP_UNKNOWN232", 1},
OP_UNKNOWN233: {OP_UNKNOWN233, "OP_UNKNOWN233", 1},
OP_UNKNOWN234: {OP_UNKNOWN234, "OP_UNKNOWN234", 1},
OP_UNKNOWN235: {OP_UNKNOWN235, "OP_UNKNOWN235", 1},
OP_UNKNOWN236: {OP_UNKNOWN236, "OP_UNKNOWN236", 1},
OP_UNKNOWN237: {OP_UNKNOWN237, "OP_UNKNOWN237", 1},
OP_UNKNOWN238: {OP_UNKNOWN238, "OP_UNKNOWN238", 1},
OP_UNKNOWN239: {OP_UNKNOWN239, "OP_UNKNOWN239", 1},
OP_UNKNOWN240: {OP_UNKNOWN240, "OP_UNKNOWN240", 1},
OP_UNKNOWN241: {OP_UNKNOWN241, "OP_UNKNOWN241", 1},
OP_UNKNOWN242: {OP_UNKNOWN242, "OP_UNKNOWN242", 1},
OP_UNKNOWN243: {OP_UNKNOWN243, "OP_UNKNOWN243", 1},
OP_UNKNOWN244: {OP_UNKNOWN244, "OP_UNKNOWN244", 1},
OP_UNKNOWN245: {OP_UNKNOWN245, "OP_UNKNOWN245", 1},
OP_UNKNOWN246: {OP_UNKNOWN246, "OP_UNKNOWN246", 1},
OP_UNKNOWN247: {OP_UNKNOWN247, "OP_UNKNOWN247", 1},
OP_UNKNOWN248: {OP_UNKNOWN248, "OP_UNKNOWN248", 1},
OP_UNKNOWN249: {OP_UNKNOWN249, "OP_UNKNOWN249", 1},
// Bitcoin Core internal use opcode. Defined here for completeness.
OP_SMALLINTEGER: {OP_SMALLINTEGER, "OP_SMALLINTEGER", 1},
OP_PUBKEYS: {OP_PUBKEYS, "OP_PUBKEYS", 1},
OP_UNKNOWN252: {OP_UNKNOWN252, "OP_UNKNOWN252", 1},
OP_PUBKEYHASH: {OP_PUBKEYHASH, "OP_PUBKEYHASH", 1},
OP_PUBKEY: {OP_PUBKEY, "OP_PUBKEY", 1},
OP_INVALIDOPCODE: {OP_INVALIDOPCODE, "OP_INVALIDOPCODE", 1},
}
// opcodeOnelineRepls defines opcode names which are replaced when doing a
// one-line disassembly. This is done to match the output of the reference
// implementation while not changing the opcode names in the nicer full
// disassembly.
var opcodeOnelineRepls = map[string]string{
"OP_1NEGATE": "-1",
"OP_0": "0",
"OP_1": "1",
"OP_2": "2",
"OP_3": "3",
"OP_4": "4",
"OP_5": "5",
"OP_6": "6",
"OP_7": "7",
"OP_8": "8",
"OP_9": "9",
"OP_10": "10",
"OP_11": "11",
"OP_12": "12",
"OP_13": "13",
"OP_14": "14",
"OP_15": "15",
"OP_16": "16",
}
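// disasmOneLineSketch is a minimal sketch of the one-line disassembly format
// that the replacement table above targets: small integer opcodes such as OP_1
// print as bare numbers. It assumes "fmt" and
// "github.com/btcsuite/btcd/txscript" are imported in this file.
func disasmOneLineSketch() {
	script := []byte{OP_TRUE, OP_2, OP_ADD}
	if asm, err := txscript.DisasmString(script); err == nil {
		fmt.Println(asm) // expected to print: 1 2 OP_ADD
	}
}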
// parsedOpcode represents an opcode that has been parsed and includes any
// potential data associated with it.
type parsedOpcode struct {
opcode *opcode
data []byte
}

View File

@@ -1,15 +1,69 @@
package utils
import (
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"strings"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/btcsuite/btcd/btcec/v2/schnorr"
"github.com/btcsuite/btcd/btcutil"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/txscript"
)
func GetReversedStringHex(data string) (result string) {
return hex.EncodeToString(ReverseBytes([]byte(data)))
}
func DecodeTokensFromSwapPair(tickPair string) (token0, token1 string, err error) {
if len(tickPair) != 9 || tickPair[4] != '/' {
return "", "", errors.New("func: removeLiq tickPair invalid")
}
token0 = tickPair[:4]
token1 = tickPair[5:]
return token0, token1, nil
}
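// decodeSwapPairSketch is a minimal usage sketch for the helper above; the pair
// literal is illustrative and relies on the fixed "tttt/tttt" format (two
// 4-byte tickers separated by '/') enforced by the length check.
func decodeSwapPairSketch() {
	token0, token1, err := DecodeTokensFromSwapPair("ordi/sats")
	if err != nil {
		return
	}
	fmt.Println(token0, token1) // expected: ordi sats
}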
func GetValidUniqueLowerTickerTicker(ticker string) (lowerTicker string, err error) {
if len(ticker) != 4 && len(ticker) != 5 {
return "", errors.New("ticker len invalid")
}
lowerTicker = strings.ToLower(ticker)
return lowerTicker, nil
}
// GetSha256 returns the single SHA-256 hash of data.
func GetSha256(data []byte) (hash []byte) {
sha := sha256.New()
sha.Write(data[:])
hash = sha.Sum(nil)
return
}
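// GetHash256 returns the double SHA-256 hash of data (SHA-256 applied twice),
// the digest used for Bitcoin txids and block hashes.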
func GetHash256(data []byte) (hash []byte) {
sha := sha256.New()
sha.Write(data[:])
tmp := sha.Sum(nil)
sha.Reset()
sha.Write(tmp)
hash = sha.Sum(nil)
return
}
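// HashString returns the hex encoding of a 32-byte hash in reversed (display)
// byte order; any other input length yields the all-zero string.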
func HashString(data []byte) (res string) {
if len(data) != 32 {
return "0000000000000000000000000000000000000000000000000000000000000000"
}
length := 32
var reverseData [32]byte
// hashes are stored little-endian; reverse for display (big-endian) order
for i := 0; i < length; i++ {
reverseData[i] = data[length-i-1]
}
return hex.EncodeToString(reverseData[:])
}
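// txidStyleHashSketch is a minimal sketch: double-SHA256 via GetHash256 followed
// by HashString yields the reversed (big-endian) hex form used to display txids.
// The input bytes are illustrative only.
func txidStyleHashSketch() {
	raw := []byte("example-serialized-tx")
	fmt.Println(HashString(GetHash256(raw)))
}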
func ReverseBytes(data []byte) (result []byte) {
@@ -19,7 +73,47 @@ func ReverseBytes(data []byte) (result []byte) {
return result
}
// PayToTaprootScript creates a pk script for a pay-to-taproot output key.
func PayToTaprootScript(taprootKey *btcec.PublicKey) ([]byte, error) {
return txscript.NewScriptBuilder().
AddOp(txscript.OP_1).
AddData(schnorr.SerializePubKey(taprootKey)).
Script()
}
// PayToWitnessScript creates a pk script for a pay-to-witness-pubkey-hash (P2WPKH) output.
func PayToWitnessScript(pubkey *btcec.PublicKey) ([]byte, error) {
return txscript.NewScriptBuilder().
AddOp(txscript.OP_0).
AddData(btcutil.Hash160(pubkey.SerializeCompressed())).
Script()
}
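// outputScriptSketch is a minimal sketch: given a compressed secp256k1 public
// key, build a P2WPKH and a P2TR output script with the helpers above. Committing
// the raw internal key without a taproot tweak is a simplification for illustration.
func outputScriptSketch(compressedPubKey []byte) {
	pubKey, err := btcec.ParsePubKey(compressedPubKey)
	if err != nil {
		return
	}
	if pk, err := PayToWitnessScript(pubKey); err == nil {
		fmt.Printf("p2wpkh: %x\n", pk)
	}
	if pk, err := PayToTaprootScript(pubKey); err == nil {
		fmt.Printf("p2tr:   %x\n", pk)
	}
}
// GetPkScriptByAddress decodes an address into its output script. As a special
// case, a 68-character hex string prefixed with "6a20" is treated as a raw
// OP_RETURN script and returned verbatim.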
func GetPkScriptByAddress(addr string, netParams *chaincfg.Params) (pk []byte, err error) {
if len(addr) == 0 {
return nil, errors.New("decoded address empty")
}
addressObj, err := btcutil.DecodeAddress(addr, netParams)
if err != nil {
if len(addr) != 68 || !strings.HasPrefix(addr, "6a20") {
return nil, errors.New("decoded address is of unknown format")
}
// accept the address string as a raw hex-encoded OP_RETURN script ("6a20" + 32 bytes)
pkHex, err := hex.DecodeString(addr)
if err != nil {
return nil, errors.New("decoded address is of unknown format")
}
return pkHex, nil
}
addressPkScript, err := txscript.PayToAddrScript(addressObj)
if err != nil {
return nil, errors.New("decoded address is of unknown format")
}
return addressPkScript, nil
}
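// pkScriptRoundTripSketch is a minimal sketch: decode an address into its output
// script, then recover the address with GetAddressFromScript below.
func pkScriptRoundTripSketch(addr string, params *chaincfg.Params) (string, error) {
	pkScript, err := GetPkScriptByAddress(addr, params)
	if err != nil {
		return "", err
	}
	return GetAddressFromScript(pkScript, params)
}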
// GetAddressFromScript uses btcsuite's txscript to extract the address encoded by a pk script.
func GetAddressFromScript(script []byte, params *chaincfg.Params) (string, error) {
scriptClass, addresses, _, err := txscript.ExtractPkScriptAddrs(script, params)
if err != nil {
@@ -36,3 +130,54 @@ func GetAddressFromScript(script []byte, params *chaincfg.Params) (string, error
return addresses[0].EncodeAddress(), nil
}
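// GetModuleFromScript decodes a module id from an OP_RETURN output of the form
// OP_RETURN <push: 32-byte txid + optional little-endian index of 0-4 bytes>,
// rendered as "<reversed-txid-hex>i<index>".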
func GetModuleFromScript(script []byte) (module string, ok bool) {
if len(script) < 34 || len(script) > 38 {
return "", false
}
if script[0] != 0x6a {
return "", false
}
if int(script[1])+2 != len(script) {
return "", false
}
var idx uint32
if script[1] <= 32 {
idx = uint32(0)
} else if script[1] <= 33 {
idx = uint32(script[34])
} else if script[1] <= 34 {
idx = uint32(binary.LittleEndian.Uint16(script[34:36]))
} else if script[1] <= 35 {
idx = uint32(script[34]) | uint32(script[35])<<8 | uint32(script[36])<<16
} else if script[1] <= 36 {
idx = binary.LittleEndian.Uint32(script[34:38])
}
module = fmt.Sprintf("%si%d", HashString(script[2:34]), idx)
return module, true
}
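// DecodeInscriptionFromBin decodes an inscription id from its binary form: a
// 32-byte txid followed by an optional little-endian index of up to 4 bytes,
// rendered as "<reversed-txid-hex>i<index>".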
func DecodeInscriptionFromBin(script []byte) (id string) {
n := len(script)
if n < 32 || n > 36 {
return ""
}
var idx uint32
if n == 32 {
idx = uint32(0)
} else if n <= 33 {
idx = uint32(script[32])
} else if n <= 34 {
idx = uint32(binary.LittleEndian.Uint16(script[32:34]))
} else if n <= 35 {
idx = uint32(script[32]) | uint32(script[33])<<8 | uint32(script[34])<<16
} else if n <= 36 {
idx = binary.LittleEndian.Uint32(script[32:36])
}
id = fmt.Sprintf("%si%d", HashString(script[:32]), idx)
return id
}
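// decodeInscriptionSketch is a minimal sketch: an all-zero 32-byte txid followed
// by a one-byte index of 1 decodes to 64 zero hex characters plus "i1".
func decodeInscriptionSketch() {
	bin := make([]byte, 33)
	bin[32] = 1
	fmt.Println(DecodeInscriptionFromBin(bin))
}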