Compare commits

...

40 Commits

Author SHA1 Message Date
Waris Aiemworawutikul
e5e742cfa5 fix: handle block timeout = 0 2024-07-31 12:16:26 +07:00
Waris Aiemworawutikul
10d31de197 fix: wrong timestamp format. 2024-07-30 14:45:02 +07:00
Waris Aiemworawutikul
eab0eb5ac3 feat: add extra unit test. 2024-07-26 18:05:54 +07:00
Waris Aiemworawutikul
1d2cc88f90 fix: change to using input values since output values deducted fee. 2024-07-26 17:57:43 +07:00
Waris Aiemworawutikul
8a1c096656 refactor: convert to unit tests. 2024-07-26 17:07:33 +07:00
Waris Aiemworawutikul
17ab11200a refactor: prepare for moving integration tests. 2024-07-26 12:01:11 +07:00
Waris Aiemworawutikul
1358b96165 fix: delegate tx hash not record into db. 2024-07-26 11:23:01 +07:00
Waris Aiemworawutikul
2c0a01da56 fix: refactor 2024-07-25 17:52:13 +07:00
Waris Aiemworawutikul
a5651f9812 fix: refactor 2024-07-25 17:09:50 +07:00
Waris Aiemworawutikul
df59e5fb46 fix: 2024-07-23 18:30:07 +07:00
Waris Aiemworawutikul
43a85c5171 fix: minor mistakes. 2024-07-23 18:04:34 +07:00
Waris Aiemworawutikul
5574db8691 fix: move last_block_default to config file. 2024-07-23 13:39:38 +07:00
Waris Aiemworawutikul
5497ef85c6 fix: remove unused code. 2024-07-19 19:17:17 +07:00
Waris Aiemworawutikul
28c42c02b7 fix: sanity refactor. 2024-07-19 19:05:06 +07:00
Waris Aiemworawutikul
38cf4c95a5 fix: more consistent SQL 2024-07-19 12:02:45 +07:00
Waris Aiemworawutikul
b8ed647d91 fix: 50% chance public key compare incorrectly. 2024-07-19 11:58:50 +07:00
Waris Aiemworawutikul
9f327c58fa fix: Cannot run nodesale test because qtx is not initiated. 2024-07-18 14:56:15 +07:00
Gaze
dd35ab125e feat: add todo note 2024-07-16 15:47:30 +07:00
Gaze
276b20abca fix: handle error 2024-07-16 15:41:40 +07:00
Gaze
d62445887b fix: remove os.Exit 2024-07-16 15:37:56 +07:00
Gaze
cbc6bde3b5 fix: try to skip test in ci 2024-07-16 15:31:54 +07:00
Gaze
f150451dd7 ci: use echo to create new file 2024-07-16 15:27:52 +07:00
Gaze
0b09724c71 ci: touch result file 2024-07-16 15:23:04 +07:00
Gaze
237d0f0d73 ci: try to tidy before testing 2024-07-16 15:09:02 +07:00
Waris Aiemworawutikul
b10b8c59d7 fix: bug UTC time. 2024-07-16 13:45:04 +07:00
Waris Aiemworawutikul
6be4c877d6 fix: add entity 2024-07-16 13:45:04 +07:00
Waris Aiemworawutikul
c86380718f fix: fix table type. 2024-07-16 13:45:04 +07:00
Waris Aiemworawutikul
1e462a09d9 fix: refactored. 2024-07-16 13:45:04 +07:00
Waris Aiemworawutikul
338d4d47b3 feat: recover nodesale module. 2024-07-16 13:45:04 +07:00
gazenw
880f4b2e6a fix: handle case where input rune id is not found (#37) 2024-07-15 18:32:28 +07:00
Gaze
3f727dc11b Merge remote-tracking branch 'origin/main' into develop 2024-07-15 16:33:56 +07:00
Planxnx
60717ecc65 feat(requestlogger): add response headers 2024-07-12 00:18:15 +07:00
Planxnx
6998adedb0 fix(requestlogger): logging all request headers 2024-07-11 23:53:27 +07:00
Thanee Charattrakool
add0a541b5 feat: Request Logger fields (#35)
* feat: add with request headers config

* feat: add with fields config

* feat: format request queries
2024-07-11 23:41:18 +07:00
gazenw
dad02bf61a Merge pull request #34 from gaze-network/develop
feat: release v0.2.7
2024-07-09 16:15:35 +07:00
Gaze
694baef0aa chore: golangci-lint 2024-07-09 15:48:09 +07:00
gazenw
47119c3220 feat: remove unnecessary verbose query (#33) 2024-07-09 15:44:14 +07:00
gazenw
6203b104db Merge pull request #32 from gaze-network/develop
feat: release v0.2.5
2024-07-08 14:50:40 +07:00
gazenw
b24f27ec9a fix: incorrect condition for finding output destinations (#31) 2024-07-08 14:32:58 +07:00
Gaze
a663f909fa Merge remote-tracking branch 'origin/main' into develop 2024-07-04 12:46:51 +07:00
60 changed files with 5872 additions and 36 deletions

View File

@@ -58,6 +58,9 @@ jobs:
cache: true # caching and restoring go modules and build outputs.
- run: echo "GOVERSION=$(go version)" >> $GITHUB_ENV
- name: Touch test result file
run: echo "" > test_output.json
- name: Build
run: go build -v ./...

View File

@@ -17,6 +17,7 @@ import (
"github.com/gaze-network/indexer-network/common/errs"
"github.com/gaze-network/indexer-network/core/indexer"
"github.com/gaze-network/indexer-network/internal/config"
"github.com/gaze-network/indexer-network/modules/nodesale"
"github.com/gaze-network/indexer-network/modules/runes"
"github.com/gaze-network/indexer-network/pkg/automaxprocs"
"github.com/gaze-network/indexer-network/pkg/logger"
@@ -39,6 +40,7 @@ import (
// Register Modules
var Modules = do.Package(
do.LazyNamed("runes", runes.New),
do.LazyNamed("nodesale", nodesale.New),
)
func NewRunCommand() *cobra.Command {

View File

@@ -6,13 +6,15 @@ import (
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/common/errs"
"github.com/gaze-network/indexer-network/core/constants"
"github.com/gaze-network/indexer-network/modules/nodesale"
"github.com/gaze-network/indexer-network/modules/runes"
"github.com/spf13/cobra"
)
var versions = map[string]string{
"": constants.Version,
"runes": runes.Version,
"": constants.Version,
"runes": runes.Version,
"nodesale": nodesale.Version,
}
type versionCmdOptions struct {

View File

@@ -47,3 +47,11 @@ modules:
password: "password"
db_name: "postgres"
# url: "postgres://postgres:password@localhost:5432/postgres?sslmode=prefer" # [Optional] This will override other database credentials above.
nodesale:
postgres:
host: "localhost"
port: 5432
user: "postgres"
password: "P@ssw0rd"
db_name: "postgres"
last_block_default: 400

View File

@@ -243,39 +243,32 @@ func (d *BitcoinNodeDatasource) prepareRange(fromHeight, toHeight int64) (start,
}
// GetTransaction fetch transaction from Bitcoin node
func (d *BitcoinNodeDatasource) GetTransactionByHash(ctx context.Context, txHash chainhash.Hash) (*types.Transaction, error) {
func (d *BitcoinNodeDatasource) GetRawTransactionAndHeightByTxHash(ctx context.Context, txHash chainhash.Hash) (*wire.MsgTx, int64, error) {
rawTxVerbose, err := d.btcclient.GetRawTransactionVerbose(&txHash)
if err != nil {
return nil, errors.Wrap(err, "failed to get raw transaction")
return nil, 0, errors.Wrap(err, "failed to get raw transaction")
}
blockHash, err := chainhash.NewHashFromStr(rawTxVerbose.BlockHash)
if err != nil {
return nil, errors.Wrap(err, "failed to parse block hash")
return nil, 0, errors.Wrap(err, "failed to parse block hash")
}
block, err := d.btcclient.GetBlockVerboseTx(blockHash)
block, err := d.btcclient.GetBlockVerbose(blockHash)
if err != nil {
return nil, errors.Wrap(err, "failed to get block header")
return nil, 0, errors.Wrap(err, "failed to get block header")
}
// parse tx
txBytes, err := hex.DecodeString(rawTxVerbose.Hex)
if err != nil {
return nil, errors.Wrap(err, "failed to decode transaction hex")
return nil, 0, errors.Wrap(err, "failed to decode transaction hex")
}
var msgTx wire.MsgTx
if err := msgTx.Deserialize(bytes.NewReader(txBytes)); err != nil {
return nil, errors.Wrap(err, "failed to deserialize transaction")
}
var txIndex uint32
for i, tx := range block.Tx {
if tx.Hex == rawTxVerbose.Hex {
txIndex = uint32(i)
break
}
return nil, 0, errors.Wrap(err, "failed to deserialize transaction")
}
return types.ParseMsgTx(&msgTx, block.Height, *blockHash, txIndex), nil
return &msgTx, block.Height, nil
}
// GetBlockHeader fetch block header from Bitcoin node

go.mod (7 changed lines)
View File

@@ -25,12 +25,15 @@ require (
github.com/valyala/fasthttp v1.51.0
go.uber.org/automaxprocs v1.5.3
golang.org/x/sync v0.7.0
google.golang.org/protobuf v1.33.0
)
require github.com/stretchr/objx v0.5.2 // indirect
require (
github.com/andybalholm/brotli v1.0.5 // indirect
github.com/bitonicnl/verify-signed-message v0.7.1
github.com/btcsuite/btcd/btcec/v2 v2.3.3 // indirect
github.com/btcsuite/btcd/btcec/v2 v2.3.3
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd // indirect
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792 // indirect
@@ -38,7 +41,7 @@ require (
github.com/cockroachdb/redact v1.1.5 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/decred/dcrd/crypto/blake256 v1.0.1 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/getsentry/sentry-go v0.18.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect

go.sum (4 changed lines)
View File

@@ -221,6 +221,8 @@ github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMV
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -310,6 +312,8 @@ google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQ
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=

View File

@@ -8,6 +8,7 @@ import (
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/common"
nodesaleconfig "github.com/gaze-network/indexer-network/modules/nodesale/config"
runesconfig "github.com/gaze-network/indexer-network/modules/runes/config"
"github.com/gaze-network/indexer-network/pkg/logger"
"github.com/gaze-network/indexer-network/pkg/logger/slogx"
@@ -61,7 +62,8 @@ type BitcoinNodeClient struct {
}
type Modules struct {
Runes runesconfig.Config `mapstructure:"runes"`
Runes runesconfig.Config `mapstructure:"runes"`
NodeSale nodesaleconfig.Config `mapstructure:"nodesale"`
}
type HTTPServerConfig struct {

View File

@@ -0,0 +1,99 @@
package httphandler
import (
"fmt"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/common/errs"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"github.com/gofiber/fiber/v2"
"google.golang.org/protobuf/encoding/protojson"
)
type deployRequest struct {
DeployID string `params:"deployId"`
}
type tierResponse struct {
PriceSat uint32 `json:"priceSat"`
Limit uint32 `json:"limit"`
MaxPerAddress uint32 `json:"maxPerAddress"`
Sold int64 `json:"sold"`
}
type deployResponse struct {
Id string `json:"id"`
Name string `json:"name"`
StartsAt int64 `json:"startsAt"`
EndsAt int64 `json:"endsAt"`
Tiers []tierResponse `json:"tiers"`
SellerPublicKey string `json:"sellerPublicKey"`
MaxPerAddress uint32 `json:"maxPerAddress"`
DeployTxHash string `json:"deployTxHash"`
}
func (h *handler) deployHandler(ctx *fiber.Ctx) error {
var request deployRequest
err := ctx.ParamsParser(&request)
if err != nil {
return errors.Wrap(err, "cannot parse param")
}
var blockHeight uint64
var txIndex uint32
count, err := fmt.Sscanf(request.DeployID, "%d-%d", &blockHeight, &txIndex)
if count != 2 || err != nil {
return errs.NewPublicError("Invalid deploy ID")
}
deploys, err := h.nodeSaleDg.GetNodeSale(ctx.UserContext(), datagateway.GetNodeSaleParams{
BlockHeight: blockHeight,
TxIndex: txIndex,
})
if err != nil {
return errors.Wrap(err, "Cannot get NodeSale from db")
}
if len(deploys) < 1 {
return errs.NewPublicError("NodeSale not found")
}
deploy := deploys[0]
nodeCount, err := h.nodeSaleDg.GetNodeCountByTierIndex(ctx.UserContext(), datagateway.GetNodeCountByTierIndexParams{
SaleBlock: deploy.BlockHeight,
SaleTxIndex: deploy.TxIndex,
FromTier: 0,
ToTier: uint32(len(deploy.Tiers) - 1),
})
if err != nil {
return errors.Wrap(err, "Cannot get node count from db")
}
tiers := make([]protobuf.Tier, len(deploy.Tiers))
tierResponses := make([]tierResponse, len(deploy.Tiers))
for i, tierJson := range deploy.Tiers {
tier := &tiers[i]
err := protojson.Unmarshal(tierJson, tier)
if err != nil {
return errors.Wrap(err, "Failed to decode tiers json")
}
tierResponses[i].Limit = tiers[i].Limit
tierResponses[i].MaxPerAddress = tiers[i].MaxPerAddress
tierResponses[i].PriceSat = tiers[i].PriceSat
tierResponses[i].Sold = nodeCount[i].Count
}
err = ctx.JSON(&deployResponse{
Id: request.DeployID,
Name: deploy.Name,
StartsAt: deploy.StartsAt.UTC().Unix(),
EndsAt: deploy.EndsAt.UTC().Unix(),
Tiers: tierResponses,
SellerPublicKey: deploy.SellerPublicKey,
MaxPerAddress: deploy.MaxPerAddress,
DeployTxHash: deploy.DeployTxHash,
})
if err != nil {
return errors.Wrap(err, "Go fiber cannot parse JSON")
}
return nil
}

View File

@@ -0,0 +1,56 @@
package httphandler
import (
"encoding/json"
"time"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"github.com/gofiber/fiber/v2"
)
type eventRequest struct {
WalletAddress string `query:"walletAddress"`
}
type eventResponse struct {
TxHash string `json:"txHash"`
BlockHeight int64 `json:"blockHeight"`
TxIndex int32 `json:"txIndex"`
WalletAddress string `json:"walletAddress"`
Action string `json:"action"`
ParsedMessage json.RawMessage `json:"parsedMessage"`
BlockTimestamp time.Time `json:"blockTimestamp"`
BlockHash string `json:"blockHash"`
}
func (h *handler) eventsHandler(ctx *fiber.Ctx) error {
var request eventRequest
err := ctx.QueryParser(&request)
if err != nil {
return errors.Wrap(err, "cannot parse query")
}
events, err := h.nodeSaleDg.GetEventsByWallet(ctx.UserContext(), request.WalletAddress)
if err != nil {
return errors.Wrap(err, "Can't get events from db")
}
responses := make([]eventResponse, len(events))
for i, event := range events {
responses[i].TxHash = event.TxHash
responses[i].BlockHeight = event.BlockHeight
responses[i].TxIndex = event.TxIndex
responses[i].WalletAddress = event.WalletAddress
responses[i].Action = protobuf.Action_name[event.Action]
responses[i].ParsedMessage = event.ParsedMessage
responses[i].BlockTimestamp = event.BlockTimestamp
responses[i].BlockHash = event.BlockHash
}
err = ctx.JSON(responses)
if err != nil {
return errors.Wrap(err, "Go fiber cannot parse JSON")
}
return nil
}

View File

@@ -0,0 +1,15 @@
package httphandler
import (
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
)
type handler struct {
nodeSaleDg datagateway.NodeSaleDataGateway
}
func New(datagateway datagateway.NodeSaleDataGateway) *handler {
h := handler{}
h.nodeSaleDg = datagateway
return &h
}

View File

@@ -0,0 +1,26 @@
package httphandler
import (
"github.com/cockroachdb/errors"
"github.com/gofiber/fiber/v2"
)
type infoResponse struct {
IndexedBlockHeight int64 `json:"indexedBlockHeight"`
IndexedBlockHash string `json:"indexedBlockHash"`
}
func (h *handler) infoHandler(ctx *fiber.Ctx) error {
block, err := h.nodeSaleDg.GetLastProcessedBlock(ctx.UserContext())
if err != nil {
return errors.Wrap(err, "Cannot get last processed block")
}
err = ctx.JSON(infoResponse{
IndexedBlockHeight: block.BlockHeight,
IndexedBlockHash: block.BlockHash,
})
if err != nil {
return errors.Wrap(err, "Go fiber cannot parse JSON")
}
return nil
}

View File

@@ -0,0 +1,72 @@
package httphandler
import (
"fmt"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/common/errs"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gofiber/fiber/v2"
)
type nodeRequest struct {
DeployId string `query:"deployId"`
OwnerPublicKey string `query:"ownerPublicKey"`
DelegateePublicKey string `query:"delegateePublicKey"`
}
type nodeResponse struct {
DeployId string `json:"deployId"`
NodeId uint32 `json:"nodeId"`
TierIndex int32 `json:"tierIndex"`
DelegatedTo string `json:"delegatedTo"`
OwnerPublicKey string `json:"ownerPublicKey"`
PurchaseTxHash string `json:"purchaseTxHash"`
DelegateTxHash string `json:"delegateTxHash"`
PurchaseBlockHeight int32 `json:"purchaseBlockHeight"`
}
func (h *handler) nodesHandler(ctx *fiber.Ctx) error {
var request nodeRequest
err := ctx.QueryParser(&request)
if err != nil {
return errors.Wrap(err, "cannot parse query")
}
ownerPublicKey := request.OwnerPublicKey
delegateePublicKey := request.DelegateePublicKey
var blockHeight int64
var txIndex int32
count, err := fmt.Sscanf(request.DeployId, "%d-%d", &blockHeight, &txIndex)
if count != 2 || err != nil {
return errs.NewPublicError("Invalid deploy ID")
}
nodes, err := h.nodeSaleDg.GetNodesByPubkey(ctx.UserContext(), datagateway.GetNodesByPubkeyParams{
SaleBlock: blockHeight,
SaleTxIndex: txIndex,
OwnerPublicKey: ownerPublicKey,
DelegatedTo: delegateePublicKey,
})
if err != nil {
return errors.Wrap(err, "Can't get nodes from db")
}
responses := make([]nodeResponse, len(nodes))
for i, node := range nodes {
responses[i].DeployId = request.DeployId
responses[i].NodeId = node.NodeID
responses[i].TierIndex = node.TierIndex
responses[i].DelegatedTo = node.DelegatedTo
responses[i].OwnerPublicKey = node.OwnerPublicKey
responses[i].PurchaseTxHash = node.PurchaseTxHash
responses[i].DelegateTxHash = node.DelegateTxHash
responses[i].PurchaseBlockHeight = txIndex
}
err = ctx.JSON(responses)
if err != nil {
return errors.Wrap(err, "Go fiber cannot parse JSON")
}
return nil
}

View File

@@ -0,0 +1,16 @@
package httphandler
import (
"github.com/gofiber/fiber/v2"
)
func (h *handler) Mount(router fiber.Router) error {
r := router.Group("/nodesale/v1")
r.Get("/info", h.infoHandler)
r.Get("/deploy/:deployId", h.deployHandler)
r.Get("/nodes", h.nodesHandler)
r.Get("/events", h.eventsHandler)
return nil
}
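Mount registers the module's read endpoints under /nodesale/v1. The following is a minimal sketch of exercising the /info route in-process with fiber's test helper; it is hypothetical illustration (the test name, mock expectation, and block values are assumptions), not code from this diff, and it reuses the mockery-generated mock shown in the unit tests later in this change:

package httphandler_test

import (
	"net/http/httptest"
	"testing"

	"github.com/gofiber/fiber/v2"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"github.com/gaze-network/indexer-network/modules/nodesale/api/httphandler"
	"github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks"
	"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
)

func TestInfoRoute(t *testing.T) {
	// Hypothetical expectation: the handler only needs the last processed block.
	mockDg := mocks.NewNodeSaleDataGatewayWithTx(t)
	mockDg.EXPECT().GetLastProcessedBlock(mock.Anything).
		Return(&entity.Block{BlockHeight: 101, BlockHash: "00", Module: "nodesale"}, nil)

	app := fiber.New()
	require.NoError(t, httphandler.New(mockDg).Mount(app))

	// fiber's in-process request helper; no network listener required.
	req := httptest.NewRequest(fiber.MethodGet, "/nodesale/v1/info", nil)
	resp, err := app.Test(req)
	require.NoError(t, err)
	require.Equal(t, fiber.StatusOK, resp.StatusCode)
}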

View File

@@ -0,0 +1,8 @@
package config
import "github.com/gaze-network/indexer-network/internal/postgres"
type Config struct {
Postgres postgres.Config `mapstructure:"postgres"`
LastBlockDefault int64 `mapstructure:"last_block_default"`
}

View File

@@ -0,0 +1,9 @@
BEGIN;
DROP TABLE IF EXISTS nodes;
DROP TABLE IF EXISTS node_sales;
DROP TABLE IF EXISTS events;
DROP TABLE IF EXISTS blocks;
COMMIT;

View File

@@ -0,0 +1,64 @@
BEGIN;
CREATE TABLE IF NOT EXISTS blocks (
"block_height" BIGINT NOT NULL,
"block_hash" TEXT NOT NULL,
"module" TEXT NOT NULL,
PRIMARY KEY("block_height", "block_hash")
);
CREATE TABLE IF NOT EXISTS events (
"tx_hash" TEXT NOT NULL PRIMARY KEY,
"block_height" BIGINT NOT NULL,
"tx_index" INTEGER NOT NULL,
"wallet_address" TEXT NOT NULL,
"valid" BOOLEAN NOT NULL,
"action" INTEGER NOT NULL,
"raw_message" BYTEA NOT NULL,
"parsed_message" JSONB NOT NULL DEFAULT '{}',
"block_timestamp" TIMESTAMP NOT NULL,
"block_hash" TEXT NOT NULL,
"metadata" JSONB NOT NULL DEFAULT '{}',
"reason" TEXT NOT NULL DEFAULT ''
);
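-- Sentinel event row (tx_hash = ''): lets nodes.delegate_tx_hash DEFAULT '' while still satisfying its foreign key to events(tx_hash); it is kept by the ClearEvents reset query.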
INSERT INTO events("tx_hash", "block_height", "tx_index",
"wallet_address", "valid", "action",
"raw_message", "parsed_message", "block_timestamp",
"block_hash", "metadata")
VALUES ('', -1, -1,
'', false, -1,
'', '{}', NOW(),
'', '{}');
CREATE TABLE IF NOT EXISTS node_sales (
"block_height" BIGINT NOT NULL,
"tx_index" INTEGER NOT NULL,
"name" TEXT NOT NULL,
"starts_at" TIMESTAMP NOT NULL,
"ends_at" TIMESTAMP NOT NULL,
"tiers" JSONB[] NOT NULL,
"seller_public_key" TEXT NOT NULL,
"max_per_address" INTEGER NOT NULL,
"deploy_tx_hash" TEXT NOT NULL REFERENCES events(tx_hash) ON DELETE CASCADE,
"max_discount_percentage" INTEGER NOT NULL,
"seller_wallet" TEXT NOT NULL,
PRIMARY KEY ("block_height", "tx_index")
);
CREATE TABLE IF NOT EXISTS nodes (
"sale_block" BIGINT NOT NULL,
"sale_tx_index" INTEGER NOT NULL,
"node_id" INTEGER NOT NULL,
"tier_index" INTEGER NOT NULL,
"delegated_to" TEXT NOT NULL DEFAULT '',
"owner_public_key" TEXT NOT NULL,
"purchase_tx_hash" TEXT NOT NULL REFERENCES events(tx_hash) ON DELETE CASCADE,
"delegate_tx_hash" TEXT NOT NULL DEFAULT '' REFERENCES events(tx_hash) ON DELETE SET DEFAULT,
PRIMARY KEY("sale_block", "sale_tx_index", "node_id"),
FOREIGN KEY("sale_block", "sale_tx_index") REFERENCES node_sales("block_height", "tx_index")
);
COMMIT;

View File

@@ -0,0 +1,15 @@
-- name: GetLastProcessedBlock :one
SELECT * FROM blocks ORDER BY block_height DESC LIMIT 1;
-- name: GetBlock :one
SELECT * FROM blocks
WHERE "block_height" = $1;
-- name: RemoveBlockFrom :execrows
DELETE FROM blocks
WHERE "block_height" >= @from_block;
-- name: CreateBlock :exec
INSERT INTO blocks ("block_height", "block_hash", "module")
VALUES ($1, $2, $3);

View File

@@ -0,0 +1,14 @@
-- name: RemoveEventsFromBlock :execrows
DELETE FROM events
WHERE "block_height" >= @from_block;
-- name: CreateEvent :exec
INSERT INTO events ("tx_hash", "block_height", "tx_index", "wallet_address", "valid", "action",
"raw_message", "parsed_message", "block_timestamp", "block_hash", "metadata",
"reason")
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12);
-- name: GetEventsByWallet :many
SELECT *
FROM events
WHERE wallet_address = $1;

View File

@@ -0,0 +1,51 @@
-- name: ClearDelegate :execrows
UPDATE nodes
SET "delegated_to" = ''
WHERE "delegate_tx_hash" = '';
-- name: SetDelegates :execrows
UPDATE nodes
SET delegated_to = @delegatee, delegate_tx_hash = $3
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
node_id = ANY (@node_ids::int[]);
-- name: GetNodesByIds :many
SELECT *
FROM nodes
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
node_id = ANY (@node_ids::int[]);
-- name: GetNodesByOwner :many
SELECT *
FROM nodes
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
owner_public_key = $3
ORDER BY tier_index;
-- name: GetNodesByPubkey :many
SELECT nodes.*
FROM nodes JOIN events ON nodes.purchase_tx_hash = events.tx_hash
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
owner_public_key = $3 AND
delegated_to = $4;
-- name: CreateNode :exec
INSERT INTO nodes (sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8);
-- name: GetNodeCountByTierIndex :many
SELECT (tiers.tier_index)::int AS tier_index, count(nodes.tier_index)
FROM generate_series(@from_tier::int,@to_tier::int) AS tiers(tier_index)
LEFT JOIN
(SELECT *
FROM nodes
WHERE sale_block = $1 AND
sale_tx_index= $2)
AS nodes ON tiers.tier_index = nodes.tier_index
GROUP BY tiers.tier_index
ORDER BY tiers.tier_index;

View File

@@ -0,0 +1,9 @@
-- name: CreateNodeSale :exec
INSERT INTO node_sales ("block_height", "tx_index", "name", "starts_at", "ends_at", "tiers", "seller_public_key", "max_per_address", "deploy_tx_hash", "max_discount_percentage", "seller_wallet")
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11);
-- name: GetNodeSale :many
SELECT *
FROM node_sales
WHERE block_height = $1 AND
tx_index = $2;

View File

@@ -0,0 +1,3 @@
-- name: ClearEvents :exec
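-- Deletes everything except the sentinel row (tx_hash = '') inserted by the migration.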
DELETE FROM events
WHERE tx_hash <> '';

File diff suppressed because it is too large.

View File

@@ -0,0 +1,76 @@
package datagateway
import (
"context"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
)
type NodeSaleDataGateway interface {
BeginNodeSaleTx(ctx context.Context) (NodeSaleDataGatewayWithTx, error)
CreateBlock(ctx context.Context, arg entity.Block) error
GetBlock(ctx context.Context, blockHeight int64) (*entity.Block, error)
GetLastProcessedBlock(ctx context.Context) (*entity.Block, error)
RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error)
RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error)
ClearDelegate(ctx context.Context) (int64, error)
GetNodesByIds(ctx context.Context, arg GetNodesByIdsParams) ([]entity.Node, error)
CreateEvent(ctx context.Context, arg entity.NodeSaleEvent) error
SetDelegates(ctx context.Context, arg SetDelegatesParams) (int64, error)
CreateNodeSale(ctx context.Context, arg entity.NodeSale) error
GetNodeSale(ctx context.Context, arg GetNodeSaleParams) ([]entity.NodeSale, error)
GetNodesByOwner(ctx context.Context, arg GetNodesByOwnerParams) ([]entity.Node, error)
CreateNode(ctx context.Context, arg entity.Node) error
GetNodeCountByTierIndex(ctx context.Context, arg GetNodeCountByTierIndexParams) ([]GetNodeCountByTierIndexRow, error)
GetNodesByPubkey(ctx context.Context, arg GetNodesByPubkeyParams) ([]entity.Node, error)
GetEventsByWallet(ctx context.Context, walletAddress string) ([]entity.NodeSaleEvent, error)
}
type NodeSaleDataGatewayWithTx interface {
NodeSaleDataGateway
Tx
}
type GetNodesByIdsParams struct {
SaleBlock uint64
SaleTxIndex uint32
NodeIds []uint32
}
type SetDelegatesParams struct {
SaleBlock uint64
SaleTxIndex int32
Delegatee string
DelegateTxHash string
NodeIds []uint32
}
type GetNodeSaleParams struct {
BlockHeight uint64
TxIndex uint32
}
type GetNodesByOwnerParams struct {
SaleBlock uint64
SaleTxIndex uint32
OwnerPublicKey string
}
type GetNodeCountByTierIndexParams struct {
SaleBlock uint64
SaleTxIndex uint32
FromTier uint32
ToTier uint32
}
type GetNodeCountByTierIndexRow struct {
TierIndex int32
Count int64
}
type GetNodesByPubkeyParams struct {
SaleBlock int64
SaleTxIndex int32
OwnerPublicKey string
DelegatedTo string
}

View File

@@ -0,0 +1,12 @@
package datagateway
import "context"
type Tx interface {
// Commit commits the DB transaction. All changes made after Begin() will be persisted. Calling Commit() will close the current transaction.
// If Commit() is called without a prior Begin(), it must be a no-op.
Commit(ctx context.Context) error
// Rollback rolls back the DB transaction. All changes made after Begin() will be discarded.
// Rollback() must be safe to call even if no transaction is active. Hence, a defer Rollback() is safe, even if Commit() was called prior with non-error conditions.
Rollback(ctx context.Context) error
}
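The comments above pin down the contract: Commit without a prior Begin is a no-op, and Rollback stays safe even after Commit. Below is a minimal sketch of one way to satisfy it with pgx; this is a hypothetical adapter (assuming pgx/v5 and its ErrTxClosed sentinel), not the repository's actual implementation:

package postgres

import (
	"context"

	"github.com/cockroachdb/errors"
	"github.com/jackc/pgx/v5"
)

// txWrapper is a hypothetical adapter satisfying datagateway.Tx.
type txWrapper struct {
	tx pgx.Tx // nil when no transaction was begun
}

// Commit persists the transaction; it is a no-op when no transaction is active.
func (w *txWrapper) Commit(ctx context.Context) error {
	if w.tx == nil {
		return nil
	}
	if err := w.tx.Commit(ctx); err != nil {
		return errors.Wrap(err, "failed to commit transaction")
	}
	w.tx = nil
	return nil
}

// Rollback discards the transaction; safe to defer even after Commit,
// since a rollback of an already-closed pgx transaction is ignored.
func (w *txWrapper) Rollback(ctx context.Context) error {
	if w.tx == nil {
		return nil
	}
	if err := w.tx.Rollback(ctx); err != nil && !errors.Is(err, pgx.ErrTxClosed) {
		return errors.Wrap(err, "failed to rollback transaction")
	}
	w.tx = nil
	return nil
}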

View File

@@ -0,0 +1,61 @@
package nodesale
import (
"context"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/core/types"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
delegatevalidator "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator/delegate"
)
func (p *Processor) ProcessDelegate(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, block *types.Block, event NodeSaleEvent) error {
validator := delegatevalidator.New()
delegate := event.EventMessage.Delegate
_, nodes, err := validator.NodesExist(ctx, qtx, delegate.DeployID, delegate.NodeIDs)
if err != nil {
return errors.Wrap(err, "Cannot query")
}
for _, node := range nodes {
valid := validator.EqualXonlyPublicKey(node.OwnerPublicKey, event.TxPubkey)
if !valid {
break
}
}
err = qtx.CreateEvent(ctx, entity.NodeSaleEvent{
TxHash: event.Transaction.TxHash.String(),
TxIndex: int32(event.Transaction.Index),
Action: int32(event.EventMessage.Action),
RawMessage: event.RawData,
ParsedMessage: event.EventJson,
BlockTimestamp: block.Header.Timestamp,
BlockHash: event.Transaction.BlockHash.String(),
BlockHeight: event.Transaction.BlockHeight,
Valid: validator.Valid,
WalletAddress: p.PubkeyToPkHashAddress(event.TxPubkey).EncodeAddress(),
Metadata: nil,
Reason: validator.Reason,
})
if err != nil {
return errors.Wrap(err, "Failed to insert event")
}
if validator.Valid {
_, err = qtx.SetDelegates(ctx, datagateway.SetDelegatesParams{
SaleBlock: delegate.DeployID.Block,
SaleTxIndex: int32(delegate.DeployID.TxIndex),
Delegatee: delegate.DelegateePublicKey,
DelegateTxHash: event.Transaction.TxHash.String(),
NodeIds: delegate.NodeIDs,
})
if err != nil {
return errors.Wrap(err, "Failed to set delegate")
}
}
return nil
}

View File

@@ -0,0 +1,84 @@
package nodesale
import (
"context"
"encoding/hex"
"testing"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/gaze-network/indexer-network/common"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)
func TestDelegate(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
delegateePrivateKey, _ := btcec.NewPrivateKey()
delegateePubkeyHex := hex.EncodeToString(delegateePrivateKey.PubKey().SerializeCompressed())
delegateMessage := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_DELEGATE,
Delegate: &protobuf.ActionDelegate{
DelegateePublicKey: delegateePubkeyHex,
NodeIDs: []uint32{9, 10},
DeployID: &protobuf.ActionID{
Block: uint64(testBlockHeight) - 2,
TxIndex: uint32(testTxIndex) - 2,
},
},
}
event, block := assembleTestEvent(buyerPrivateKey, "131313131313", "131313131313", 0, 0, delegateMessage)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == true
})).Return(nil)
mockDgTx.EXPECT().GetNodesByIds(mock.Anything, datagateway.GetNodesByIdsParams{
SaleBlock: delegateMessage.Delegate.DeployID.Block,
SaleTxIndex: delegateMessage.Delegate.DeployID.TxIndex,
NodeIds: []uint32{9, 10},
}).Return([]entity.Node{
{
SaleBlock: delegateMessage.Delegate.DeployID.Block,
SaleTxIndex: delegateMessage.Delegate.DeployID.TxIndex,
NodeID: 9,
TierIndex: 1,
DelegatedTo: "",
OwnerPublicKey: buyerPubkeyHex,
PurchaseTxHash: mock.Anything,
DelegateTxHash: "",
},
{
SaleBlock: delegateMessage.Delegate.DeployID.Block,
SaleTxIndex: delegateMessage.Delegate.DeployID.TxIndex,
NodeID: 10,
TierIndex: 2,
DelegatedTo: "",
OwnerPublicKey: buyerPubkeyHex,
PurchaseTxHash: mock.Anything,
DelegateTxHash: "",
},
}, nil)
mockDgTx.EXPECT().SetDelegates(mock.Anything, datagateway.SetDelegatesParams{
SaleBlock: delegateMessage.Delegate.DeployID.Block,
SaleTxIndex: int32(delegateMessage.Delegate.DeployID.TxIndex),
Delegatee: delegateMessage.Delegate.DelegateePublicKey,
DelegateTxHash: event.Transaction.TxHash.String(),
NodeIds: delegateMessage.Delegate.NodeIDs,
}).Return(2, nil)
err := p.ProcessDelegate(ctx, mockDgTx, block, event)
require.NoError(t, err)
}

View File

@@ -0,0 +1,67 @@
package nodesale
import (
"context"
"time"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/core/types"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/validator"
"google.golang.org/protobuf/encoding/protojson"
)
func (p *Processor) ProcessDeploy(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, block *types.Block, event NodeSaleEvent) error {
deploy := event.EventMessage.Deploy
validator := validator.New()
validator.EqualXonlyPublicKey(deploy.SellerPublicKey, event.TxPubkey)
err := qtx.CreateEvent(ctx, entity.NodeSaleEvent{
TxHash: event.Transaction.TxHash.String(),
TxIndex: int32(event.Transaction.Index),
Action: int32(event.EventMessage.Action),
RawMessage: event.RawData,
ParsedMessage: event.EventJson,
BlockTimestamp: block.Header.Timestamp,
BlockHash: event.Transaction.BlockHash.String(),
BlockHeight: event.Transaction.BlockHeight,
Valid: validator.Valid,
WalletAddress: p.PubkeyToPkHashAddress(event.TxPubkey).EncodeAddress(),
Metadata: nil,
Reason: validator.Reason,
})
if err != nil {
return errors.Wrap(err, "Failed to insert event")
}
if validator.Valid {
tiers := make([][]byte, len(deploy.Tiers))
for i, tier := range deploy.Tiers {
tierJson, err := protojson.Marshal(tier)
if err != nil {
return errors.Wrap(err, "Failed to parse tiers to json")
}
tiers[i] = tierJson
}
err = qtx.CreateNodeSale(ctx, entity.NodeSale{
BlockHeight: uint64(event.Transaction.BlockHeight),
TxIndex: event.Transaction.Index,
Name: deploy.Name,
StartsAt: time.Unix(int64(deploy.StartsAt), 0),
EndsAt: time.Unix(int64(deploy.EndsAt), 0),
Tiers: tiers,
SellerPublicKey: deploy.SellerPublicKey,
MaxPerAddress: deploy.MaxPerAddress,
DeployTxHash: event.Transaction.TxHash.String(),
MaxDiscountPercentage: int32(deploy.MaxDiscountPercentage),
SellerWallet: deploy.SellerWallet,
})
if err != nil {
return errors.Wrap(err, "Failed to insert NodeSale")
}
}
return nil
}

View File

@@ -0,0 +1,139 @@
package nodesale
import (
"context"
"encoding/hex"
"testing"
"time"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/gaze-network/indexer-network/common"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"github.com/samber/lo"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/encoding/protojson"
)
func TestDeployInvalid(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
prvKey, err := btcec.NewPrivateKey()
require.NoError(t, err)
strangerKey, err := btcec.NewPrivateKey()
require.NoError(t, err)
strangerPubkeyHex := hex.EncodeToString(strangerKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(prvKey.PubKey())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_DEPLOY,
Deploy: &protobuf.ActionDeploy{
Name: t.Name(),
StartsAt: 100,
EndsAt: 200,
Tiers: []*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 5,
MaxPerAddress: 100,
},
},
SellerPublicKey: strangerPubkeyHex,
MaxPerAddress: 100,
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}
event, block := assembleTestEvent(prvKey, "0101010101", "0101010101", 0, 0, message)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false
})).Return(nil)
err = p.ProcessDeploy(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNodeSale")
}
func TestDeployValid(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
privateKey, err := btcec.NewPrivateKey()
require.NoError(t, err)
pubkeyHex := hex.EncodeToString(privateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(privateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_DEPLOY,
Deploy: &protobuf.ActionDeploy{
Name: t.Name(),
StartsAt: uint32(startAt.UTC().Unix()),
EndsAt: uint32(endAt.UTC().Unix()),
Tiers: []*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 5,
MaxPerAddress: 100,
},
},
SellerPublicKey: pubkeyHex,
MaxPerAddress: 100,
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}
event, block := assembleTestEvent(privateKey, "0202020202", "0202020202", 0, 0, message)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == true
})).Return(nil)
tiers := lo.Map(message.Deploy.Tiers, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().CreateNodeSale(mock.Anything, entity.NodeSale{
BlockHeight: uint64(event.Transaction.BlockHeight),
TxIndex: uint32(event.Transaction.Index),
Name: message.Deploy.Name,
StartsAt: time.Unix(int64(message.Deploy.StartsAt), 0),
EndsAt: time.Unix(int64(message.Deploy.EndsAt), 0),
Tiers: tiers,
SellerPublicKey: message.Deploy.SellerPublicKey,
MaxPerAddress: message.Deploy.MaxPerAddress,
DeployTxHash: event.Transaction.TxHash.String(),
MaxDiscountPercentage: int32(message.Deploy.MaxDiscountPercentage),
SellerWallet: message.Deploy.SellerWallet,
}).Return(nil)
p.ProcessDeploy(ctx, mockDgTx, block, event)
}

View File

@@ -0,0 +1,55 @@
package entity
import "time"
type Block struct {
BlockHeight int64
BlockHash string
Module string
}
type Node struct {
SaleBlock uint64
SaleTxIndex uint32
NodeID uint32
TierIndex int32
DelegatedTo string
OwnerPublicKey string
PurchaseTxHash string
DelegateTxHash string
}
type NodeSale struct {
BlockHeight uint64
TxIndex uint32
Name string
StartsAt time.Time
EndsAt time.Time
Tiers [][]byte
SellerPublicKey string
MaxPerAddress uint32
DeployTxHash string
MaxDiscountPercentage int32
SellerWallet string
}
type NodeSaleEvent struct {
TxHash string
BlockHeight int64
TxIndex int32
WalletAddress string
Valid bool
Action int32
RawMessage []byte
ParsedMessage []byte
BlockTimestamp time.Time
BlockHash string
Metadata *MetadataEventPurchase
Reason string
}
type MetadataEventPurchase struct {
ExpectedTotalAmountDiscounted uint64
ReportedTotalAmount uint64
PaidTotalAmount uint64
}

View File

@@ -0,0 +1,51 @@
package delegate
import (
"context"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/validator"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
)
type DelegateValidator struct {
validator.Validator
}
func New() *DelegateValidator {
v := validator.New()
return &DelegateValidator{
Validator: *v,
}
}
func (v *DelegateValidator) NodesExist(
ctx context.Context,
qtx datagateway.NodeSaleDataGatewayWithTx,
deployId *protobuf.ActionID,
nodeIds []uint32,
) (bool, []entity.Node, error) {
if !v.Valid {
return false, nil, nil
}
nodes, err := qtx.GetNodesByIds(ctx, datagateway.GetNodesByIdsParams{
SaleBlock: deployId.Block,
SaleTxIndex: deployId.TxIndex,
NodeIds: nodeIds,
})
if err != nil {
v.Valid = false
return v.Valid, nil, errors.Wrap(err, "Failed to get nodes")
}
if len(nodeIds) != len(nodes) {
v.Valid = false
return v.Valid, nil, nil
}
v.Valid = true
return v.Valid, nodes, nil
}

View File

@@ -0,0 +1,6 @@
package validator
const (
INVALID_PUBKEY_FORMAT = "Cannot parse public key"
INVALID_PUBKEY = "Invalid public key"
)

View File

@@ -0,0 +1,17 @@
package purchase
const (
DEPLOYID_NOT_FOUND = "Deploy ID not found."
PURCHASE_TIMEOUT = "Purchase timeout."
BLOCK_HEIGHT_TIMEOUT = "Block height over timeout block"
INVALID_SIGNATURE_FORMAT = "Cannot parse signature."
INVALID_SIGNATURE = "Invalid Signature."
INVALID_TIER_JSON = "Invalid Tier format"
INVALID_NODE_ID = "Invalid NodeId."
NODE_ALREADY_PURCHASED = "Some node has been purchased."
INVALID_SELLER_ADDR_FORMAT = "Invalid seller address."
INVALID_PAYMENT = "Total amount paid less than reported price"
INSUFFICIENT_FUND = "Insufficient fund"
OVER_LIMIT_PER_ADDR = "Purchase over limit per address."
OVER_LIMIT_PER_TIER = "Purchase over limit per tier."
)

View File

@@ -0,0 +1,283 @@
package purchase
import (
"context"
"encoding/hex"
"slices"
"time"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/btcsuite/btcd/btcec/v2/ecdsa"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/validator"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/proto"
)
type PurchaseValidator struct {
validator.Validator
}
func New() *PurchaseValidator {
v := validator.New()
return &PurchaseValidator{
Validator: *v,
}
}
func (v *PurchaseValidator) NodeSaleExists(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, payload *protobuf.PurchasePayload) (bool, *entity.NodeSale, error) {
if !v.Valid {
return false, nil, nil
}
// check node existed
deploys, err := qtx.GetNodeSale(ctx, datagateway.GetNodeSaleParams{
BlockHeight: payload.DeployID.Block,
TxIndex: payload.DeployID.TxIndex,
})
if err != nil {
v.Valid = false
return v.Valid, nil, errors.Wrap(err, "Failed to Get NodeSale")
}
if len(deploys) < 1 {
v.Valid = false
v.Reason = DEPLOYID_NOT_FOUND
return v.Valid, nil, nil
}
v.Valid = true
return v.Valid, &deploys[0], nil
}
func (v *PurchaseValidator) ValidTimestamp(deploy *entity.NodeSale, timestamp time.Time) bool {
if !v.Valid {
return false
}
if timestamp.Before(deploy.StartsAt) ||
timestamp.After(deploy.EndsAt) {
v.Valid = false
v.Reason = PURCHASE_TIMEOUT
return v.Valid
}
v.Valid = true
return v.Valid
}
func (v *PurchaseValidator) WithinTimeoutBlock(timeOutBlock uint64, blockHeight uint64) bool {
if !v.Valid {
return false
}
if timeOutBlock == 0 {
// No timeout
v.Valid = true
return v.Valid
}
if timeOutBlock < blockHeight {
v.Valid = false
v.Reason = BLOCK_HEIGHT_TIMEOUT
return v.Valid
}
v.Valid = true
return v.Valid
}
func (v *PurchaseValidator) VerifySignature(purchase *protobuf.ActionPurchase, deploy *entity.NodeSale) bool {
if !v.Valid {
return false
}
payload := purchase.Payload
payloadBytes, _ := proto.Marshal(payload)
signatureBytes, _ := hex.DecodeString(purchase.SellerSignature)
signature, err := ecdsa.ParseSignature(signatureBytes)
if err != nil {
v.Valid = false
v.Reason = INVALID_SIGNATURE_FORMAT
return v.Valid
}
hash := chainhash.DoubleHashB(payloadBytes)
pubkeyBytes, _ := hex.DecodeString(deploy.SellerPublicKey)
pubKey, _ := btcec.ParsePubKey(pubkeyBytes)
verified := signature.Verify(hash[:], pubKey)
if !verified {
v.Valid = false
v.Reason = INVALID_SIGNATURE
return v.Valid
}
v.Valid = true
return v.Valid
}
type TierMap struct {
Tiers []protobuf.Tier
BuyingTiersCount []uint32
NodeIdToTier map[uint32]int32
}
func (v *PurchaseValidator) ValidTiers(
payload *protobuf.PurchasePayload,
deploy *entity.NodeSale,
) (bool, TierMap) {
if !v.Valid {
return false, TierMap{}
}
tiers := make([]protobuf.Tier, len(deploy.Tiers))
buyingTiersCount := make([]uint32, len(tiers))
nodeIdToTier := make(map[uint32]int32)
for i, tierJson := range deploy.Tiers {
tier := &tiers[i]
err := protojson.Unmarshal(tierJson, tier)
if err != nil {
v.Valid = false
v.Reason = INVALID_TIER_JSON
return v.Valid, TierMap{}
}
}
slices.Sort(payload.NodeIDs)
var currentTier int32 = -1
var tierSum uint32 = 0
for _, nodeId := range payload.NodeIDs {
for nodeId >= tierSum && currentTier < int32(len(tiers)-1) {
currentTier++
tierSum += tiers[currentTier].Limit
}
if nodeId < tierSum {
buyingTiersCount[currentTier]++
nodeIdToTier[nodeId] = currentTier
} else {
v.Valid = false
v.Reason = INVALID_NODE_ID
return false, TierMap{}
}
}
v.Valid = true
return v.Valid, TierMap{
Tiers: tiers,
BuyingTiersCount: buyingTiersCount,
NodeIdToTier: nodeIdToTier,
}
}
func (v *PurchaseValidator) ValidUnpurchasedNodes(
ctx context.Context,
qtx datagateway.NodeSaleDataGatewayWithTx,
payload *protobuf.PurchasePayload,
) (bool, error) {
if !v.Valid {
return false, nil
}
// valid unpurchased node ID
nodes, err := qtx.GetNodesByIds(ctx, datagateway.GetNodesByIdsParams{
SaleBlock: payload.DeployID.Block,
SaleTxIndex: payload.DeployID.TxIndex,
NodeIds: payload.NodeIDs,
})
if err != nil {
v.Valid = false
return v.Valid, errors.Wrap(err, "Failed to Get nodes")
}
if len(nodes) > 0 {
v.Valid = false
v.Reason = NODE_ALREADY_PURCHASED
return false, nil
}
v.Valid = true
return true, nil
}
func (v *PurchaseValidator) ValidPaidAmount(
payload *protobuf.PurchasePayload,
deploy *entity.NodeSale,
txPaid uint64,
tiers []protobuf.Tier,
buyingTiersCount []uint32,
network *chaincfg.Params,
) (bool, *entity.MetadataEventPurchase) {
if !v.Valid {
return false, nil
}
meta := entity.MetadataEventPurchase{}
meta.PaidTotalAmount = txPaid
meta.ReportedTotalAmount = uint64(payload.TotalAmountSat)
// total amount paid must be at least the reported amount
if txPaid < uint64(payload.TotalAmountSat) {
v.Valid = false
v.Reason = INVALID_PAYMENT
return v.Valid, nil
}
// calculate total price
var totalPrice uint64 = 0
for i := 0; i < len(tiers); i++ {
totalPrice += uint64(buyingTiersCount[i] * tiers[i].PriceSat)
}
// the reported amount must be at least the maximum-discounted total price
maxDiscounted := totalPrice * (100 - uint64(deploy.MaxDiscountPercentage))
decimal := maxDiscounted % 100
maxDiscounted /= 100
if decimal%100 >= 50 {
maxDiscounted++
}
meta.ExpectedTotalAmountDiscounted = maxDiscounted
if uint64(payload.TotalAmountSat) < maxDiscounted {
v.Valid = false
v.Reason = INSUFFICIENT_FUND
return v.Valid, nil
}
v.Valid = true
return v.Valid, &meta
}
func (v *PurchaseValidator) WithinLimit(
ctx context.Context,
qtx datagateway.NodeSaleDataGatewayWithTx,
payload *protobuf.PurchasePayload,
deploy *entity.NodeSale,
tiers []protobuf.Tier,
buyingTiersCount []uint32,
) (bool, error) {
if !v.Valid {
return false, nil
}
// check node limit
// get all nodes sold in this deploy that are owned by the buyer
buyerOwnedNodes, err := qtx.GetNodesByOwner(ctx, datagateway.GetNodesByOwnerParams{
SaleBlock: deploy.BlockHeight,
SaleTxIndex: deploy.TxIndex,
OwnerPublicKey: payload.BuyerPublicKey,
})
if err != nil {
v.Valid = false
return v.Valid, errors.Wrap(err, "Failed to GetNodesByOwner")
}
if len(buyerOwnedNodes)+len(payload.NodeIDs) > int(deploy.MaxPerAddress) {
v.Valid = false
v.Reason = "Purchase over limit per address."
return v.Valid, nil
}
// count the buyer's owned nodes per tier and enforce each tier's MaxPerAddress limit
ownedTiersCount := make([]uint32, len(tiers))
for _, node := range buyerOwnedNodes {
ownedTiersCount[node.TierIndex]++
}
for i := 0; i < len(tiers); i++ {
if ownedTiersCount[i]+buyingTiersCount[i] > tiers[i].MaxPerAddress {
v.Valid = false
v.Reason = "Purchase over limit per tier."
return v.Valid, nil
}
}
v.Valid = true
return v.Valid, nil
}
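To make the discount rounding in ValidPaidAmount concrete, with hypothetical figures: for totalPrice = 1050 sat and MaxDiscountPercentage = 33, maxDiscounted = 1050 * (100 - 33) = 70350; the remainder 70350 % 100 = 50 triggers the half-up round, so maxDiscounted = 703 + 1 = 704 sat, and a purchase whose reported TotalAmountSat (and therefore paid amount) is below 704 sat is rejected with INSUFFICIENT_FUND.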

View File

@@ -0,0 +1,44 @@
package validator
import (
"bytes"
"encoding/hex"
"github.com/btcsuite/btcd/btcec/v2"
)
type Validator struct {
Valid bool
Reason string
}
func New() *Validator {
return &Validator{
Valid: true,
}
}
func (v *Validator) EqualXonlyPublicKey(target string, expected *btcec.PublicKey) bool {
if !v.Valid {
return false
}
targetBytes, err := hex.DecodeString(target)
if err != nil {
v.Valid = false
v.Reason = INVALID_PUBKEY_FORMAT
return v.Valid // bail out: target is not valid hex
}
targetPubKey, err := btcec.ParsePubKey(targetBytes)
if err != nil {
v.Valid = false
v.Reason = INVALID_PUBKEY_FORMAT
return v.Valid // bail out: avoid dereferencing a nil public key below
}
xOnlyTargetPubKey := btcec.ToSerialized(targetPubKey).SchnorrSerialized()
xOnlyExpectedPubKey := btcec.ToSerialized(expected).SchnorrSerialized()
v.Valid = bytes.Equal(xOnlyTargetPubKey[:], xOnlyExpectedPubKey[:])
if !v.Valid {
v.Reason = INVALID_PUBKEY
}
return v.Valid
}

View File

@@ -0,0 +1,61 @@
package nodesale
import (
"context"
"fmt"
"github.com/btcsuite/btcd/rpcclient"
"github.com/gaze-network/indexer-network/core/datasources"
"github.com/gaze-network/indexer-network/core/indexer"
"github.com/gaze-network/indexer-network/internal/config"
"github.com/gaze-network/indexer-network/internal/postgres"
"github.com/gaze-network/indexer-network/modules/nodesale/api/httphandler"
repository "github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres"
"github.com/gaze-network/indexer-network/pkg/logger"
"github.com/gofiber/fiber/v2"
"github.com/samber/do/v2"
)
var NODESALE_MAGIC = []byte{0x6e, 0x73, 0x6f, 0x70} // "nsop"
const (
Version = "v0.0.1-alpha"
)
func New(injector do.Injector) (indexer.IndexerWorker, error) {
ctx := do.MustInvoke[context.Context](injector)
conf := do.MustInvoke[config.Config](injector)
btcClient := do.MustInvoke[*rpcclient.Client](injector)
datasource := datasources.NewBitcoinNode(btcClient)
pg, err := postgres.NewPool(ctx, conf.Modules.NodeSale.Postgres)
if err != nil {
return nil, fmt.Errorf("Can't create postgres connection : %w", err)
}
var cleanupFuncs []func(context.Context) error
cleanupFuncs = append(cleanupFuncs, func(ctx context.Context) error {
pg.Close()
return nil
})
repository := repository.NewRepository(pg)
processor := &Processor{
NodeSaleDg: repository,
BtcClient: datasource,
Network: conf.Network,
cleanupFuncs: cleanupFuncs,
lastBlockDefault: conf.Modules.NodeSale.LastBlockDefault,
}
httpServer := do.MustInvoke[*fiber.App](injector)
nodeSaleHandler := httphandler.New(repository)
if err := nodeSaleHandler.Mount(httpServer); err != nil {
return nil, fmt.Errorf("Can't mount nodesale API : %w", err)
}
logger.InfoContext(ctx, "Mounted nodesale HTTP handler")
indexer := indexer.New(processor, datasource)
logger.InfoContext(ctx, "NodeSale module started.")
return indexer, nil
}

View File

@@ -0,0 +1,61 @@
package nodesale
import (
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/decred/dcrd/dcrec/secp256k1/v4"
"github.com/gaze-network/indexer-network/core/types"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/proto"
)
var (
testBlockHeight uint64 = 101
testTxIndex uint32 = 1
)
func assembleTestEvent(privateKey *secp256k1.PrivateKey, blockHashHex, txHashHex string, blockHeight uint64, txIndex uint32, message *protobuf.NodeSaleEvent) (NodeSaleEvent, *types.Block) {
blockHash, _ := chainhash.NewHashFromStr(blockHashHex)
txHash, _ := chainhash.NewHashFromStr(txHashHex)
rawData, _ := proto.Marshal(message)
builder := txscript.NewScriptBuilder()
builder.AddOp(txscript.OP_FALSE)
builder.AddOp(txscript.OP_IF)
builder.AddData(rawData)
builder.AddOp(txscript.OP_ENDIF)
messageJson, _ := protojson.Marshal(message)
if blockHeight == 0 {
blockHeight = testBlockHeight
testBlockHeight++
}
if txIndex == 0 {
txIndex = testTxIndex
testTxIndex++
}
event := NodeSaleEvent{
Transaction: &types.Transaction{
BlockHeight: int64(blockHeight),
BlockHash: *blockHash,
Index: uint32(txIndex),
TxHash: *txHash,
},
RawData: rawData,
EventMessage: message,
EventJson: messageJson,
TxPubkey: privateKey.PubKey(),
}
block := &types.Block{
Header: types.BlockHeader{
Timestamp: time.Now().UTC(),
},
}
return event, block
}

View File

@@ -0,0 +1,303 @@
package nodesale
import (
"bytes"
"context"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/gaze-network/indexer-network/common"
"github.com/gaze-network/indexer-network/core/indexer"
"github.com/gaze-network/indexer-network/core/types"
"github.com/gaze-network/indexer-network/pkg/logger"
"github.com/gaze-network/indexer-network/pkg/logger/slogx"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/proto"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/core/datasources"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
)
type NodeSaleEvent struct {
Transaction *types.Transaction
EventMessage *protobuf.NodeSaleEvent
EventJson []byte
TxPubkey *btcec.PublicKey
RawData []byte
InputValue uint64
}
func NewProcessor(repository datagateway.NodeSaleDataGateway,
datasource *datasources.BitcoinNodeDatasource,
network common.Network,
cleanupFuncs []func(context.Context) error,
lastBlockDefault int64,
) *Processor {
return &Processor{
NodeSaleDg: repository,
BtcClient: datasource,
Network: network,
cleanupFuncs: cleanupFuncs,
lastBlockDefault: lastBlockDefault,
}
}
func (p *Processor) Shutdown(ctx context.Context) error {
for _, cleanupFunc := range p.cleanupFuncs {
err := cleanupFunc(ctx)
if err != nil {
return errors.Wrap(err, "cleanup function error")
}
}
return nil
}
type Processor struct {
NodeSaleDg datagateway.NodeSaleDataGateway
BtcClient *datasources.BitcoinNodeDatasource
Network common.Network
cleanupFuncs []func(context.Context) error
lastBlockDefault int64
}
// CurrentBlock implements indexer.Processor.
func (p *Processor) CurrentBlock(ctx context.Context) (types.BlockHeader, error) {
block, err := p.NodeSaleDg.GetLastProcessedBlock(ctx)
if err != nil {
logger.InfoContext(ctx, "Couldn't get last processed block. Start from NODESALE_LAST_BLOCK_DEFAULT.",
slogx.Int64("currentBlock", p.lastBlockDefault))
header, err := p.BtcClient.GetBlockHeader(ctx, p.lastBlockDefault)
if err != nil {
return types.BlockHeader{}, errors.Wrap(err, "Cannot get default block from bitcoin node")
}
return types.BlockHeader{
Hash: header.Hash,
Height: p.lastBlockDefault,
}, nil
}
hash, err := chainhash.NewHashFromStr(block.BlockHash)
if err != nil {
logger.PanicContext(ctx, "Invalid hash format found in Database.")
}
return types.BlockHeader{
Hash: *hash,
Height: block.BlockHeight,
}, nil
}
// GetIndexedBlock implements indexer.Processor.
func (p *Processor) GetIndexedBlock(ctx context.Context, height int64) (types.BlockHeader, error) {
block, err := p.NodeSaleDg.GetBlock(ctx, height)
if err != nil {
return types.BlockHeader{}, errors.Wrapf(err, "Block %d not found", height)
}
hash, err := chainhash.NewHashFromStr(block.BlockHash)
if err != nil {
logger.PanicContext(ctx, "Invalid hash format found in Database.")
}
return types.BlockHeader{
Hash: *hash,
Height: block.BlockHeight,
}, nil
}
// Name implements indexer.Processor.
func (p *Processor) Name() string {
return "nodesale"
}
func extractNodeSaleData(witness [][]byte) (data []byte, internalPubkey *btcec.PublicKey, isNodeSale bool) {
tokenizer, controlBlock, isTapScript := extractTapScript(witness)
if !isTapScript {
return []byte{}, nil, false
}
state := 0
for tokenizer.Next() {
switch state {
case 0:
if tokenizer.Opcode() == txscript.OP_0 {
state++
} else {
state = 0
}
case 1:
if tokenizer.Opcode() == txscript.OP_IF {
state++
} else {
state = 0
}
case 2:
if tokenizer.Opcode() == txscript.OP_DATA_4 &&
bytes.Equal(tokenizer.Data(), NODESALE_MAGIC) {
state++
} else {
state = 0
}
case 3:
// Any instruction > txscript.OP_16 is not push data. Note: txscript.OP_PUSHDATAX < txscript.OP_16
if tokenizer.Opcode() <= txscript.OP_16 {
data := tokenizer.Data()
return data, controlBlock.InternalKey, true
}
state = 0
}
}
return []byte{}, nil, false
}
func (p *Processor) parseTransactions(ctx context.Context, transactions []*types.Transaction) ([]NodeSaleEvent, error) {
var events []NodeSaleEvent
for _, t := range transactions {
for _, txIn := range t.TxIn {
data, txPubkey, isNodeSale := extractNodeSaleData(txIn.Witness)
if !isNodeSale {
continue
}
event := &protobuf.NodeSaleEvent{}
err := proto.Unmarshal(data, event)
if err != nil {
logger.WarnContext(ctx, "Invalid Protobuf",
slogx.String("block_hash", t.BlockHash.String()),
slogx.Int("txIndex", int(t.Index)))
continue
}
eventJson, err := protojson.Marshal(event)
if err != nil {
return []NodeSaleEvent{}, errors.Wrap(err, "Failed to parse protobuf to json")
}
prevTx, _, err := p.BtcClient.GetRawTransactionAndHeightByTxHash(ctx, txIn.PreviousOutTxHash)
if err != nil {
return nil, errors.Wrap(err, "Failed to get Previous transaction data")
}
if txIn.PreviousOutIndex >= uint32(len(prevTx.TxOut)) {
return nil, errors.Wrap(err, "Invalid previous transaction from bitcoin")
}
events = append(events, NodeSaleEvent{
Transaction: t,
EventMessage: event,
EventJson: eventJson,
RawData: data,
TxPubkey: txPubkey,
InputValue: uint64(prevTx.TxOut[txIn.PreviousOutIndex].Value),
})
}
}
return events, nil
}
// Process implements indexer.Processor.
func (p *Processor) Process(ctx context.Context, inputs []*types.Block) error {
for _, block := range inputs {
logger.InfoContext(ctx, "NodeSale processing a block",
slogx.Int64("block", block.Header.Height),
slogx.Stringer("hash", block.Header.Hash))
// parse all event from each transaction including reading tx wallet
events, err := p.parseTransactions(ctx, block.Transactions)
if err != nil {
return errors.Wrap(err, "Invalid data from bitcoin client")
}
// open transaction
qtx, err := p.NodeSaleDg.BeginNodeSaleTx(ctx)
if err != nil {
return errors.Wrap(err, "Failed to create transaction")
}
defer func() {
err = qtx.Rollback(ctx)
if err != nil {
logger.PanicContext(ctx, "Failed to rollback db")
}
}()
// write block
err = qtx.CreateBlock(ctx, entity.Block{
BlockHeight: block.Header.Height,
BlockHash: block.Header.Hash.String(),
Module: p.Name(),
})
if err != nil {
return errors.Wrapf(err, "Failed to add block %d", block.Header.Height)
}
// for each events
for _, event := range events {
logger.InfoContext(ctx, "NodeSale processing event",
slogx.Uint32("txIndex", event.Transaction.Index),
slogx.Int64("blockHeight", block.Header.Height),
slogx.Stringer("blockhash", block.Header.Hash),
)
eventMessage := event.EventMessage
switch eventMessage.Action {
case protobuf.Action_ACTION_DEPLOY:
err = p.ProcessDeploy(ctx, qtx, block, event)
if err != nil {
return errors.Wrapf(err, "Failed to deploy at block %d", block.Header.Height)
}
case protobuf.Action_ACTION_DELEGATE:
err = p.ProcessDelegate(ctx, qtx, block, event)
if err != nil {
return errors.Wrapf(err, "Failed to delegate at block %d", block.Header.Height)
}
case protobuf.Action_ACTION_PURCHASE:
err = p.ProcessPurchase(ctx, qtx, block, event)
if err != nil {
return errors.Wrapf(err, "Failed to purchase at block %d", block.Header.Height)
}
default:
logger.DebugContext(ctx, "Invalid event ACTION", slogx.Stringer("txHash", (event.Transaction.TxHash)))
}
}
// close transaction
err = qtx.Commit(ctx)
if err != nil {
return errors.Wrap(err, "Failed to commit transaction")
}
logger.InfoContext(ctx, "NodeSale finished processing block",
slogx.Int64("block", block.Header.Height),
slogx.Stringer("hash", block.Header.Hash))
}
return nil
}
// RevertData implements indexer.Processor.
func (p *Processor) RevertData(ctx context.Context, from int64) error {
qtx, err := p.NodeSaleDg.BeginNodeSaleTx(ctx)
if err != nil {
return errors.Wrap(err, "Failed to create transaction")
}
defer func() { _ = qtx.Rollback(ctx) }() // best-effort rollback if Commit is not reached
_, err = qtx.RemoveBlockFrom(ctx, from)
if err != nil {
return errors.Wrap(err, "Failed to remove blocks.")
}
affected, err := qtx.RemoveEventsFromBlock(ctx, from)
if err != nil {
return errors.Wrap(err, "Failed to remove events.")
}
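// Reset delegation state that may reference delegate events removed above.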
_, err = qtx.ClearDelegate(ctx)
if err != nil {
return errors.Wrap(err, "Failed to clear delegate from nodes")
}
err = qtx.Commit(ctx)
if err != nil {
return errors.Wrap(err, "Failed to commit transaction")
}
logger.InfoContext(ctx, "Events removed",
slogx.Int64("Total removed", affected))
return nil
}
// VerifyStates implements indexer.Processor.
func (p *Processor) VerifyStates(ctx context.Context) error {
panic("unimplemented")
}
var _ indexer.Processor[*types.Block] = (*Processor)(nil)


@@ -0,0 +1,806 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.34.1
// protoc v5.26.1
// source: modules/nodesale/protobuf/nodesale.proto
// protoc modules/nodesale/protobuf/nodesale.proto --go_out=. --go_opt=module=github.com/gaze-network/indexer-network
package protobuf
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type Action int32
const (
Action_ACTION_DEPLOY Action = 0
Action_ACTION_PURCHASE Action = 1
Action_ACTION_DELEGATE Action = 2
)
// Enum value maps for Action.
var (
Action_name = map[int32]string{
0: "ACTION_DEPLOY",
1: "ACTION_PURCHASE",
2: "ACTION_DELEGATE",
}
Action_value = map[string]int32{
"ACTION_DEPLOY": 0,
"ACTION_PURCHASE": 1,
"ACTION_DELEGATE": 2,
}
)
func (x Action) Enum() *Action {
p := new(Action)
*p = x
return p
}
func (x Action) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (Action) Descriptor() protoreflect.EnumDescriptor {
return file_modules_nodesale_protobuf_nodesale_proto_enumTypes[0].Descriptor()
}
func (Action) Type() protoreflect.EnumType {
return &file_modules_nodesale_protobuf_nodesale_proto_enumTypes[0]
}
func (x Action) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use Action.Descriptor instead.
func (Action) EnumDescriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{0}
}
type NodeSaleEvent struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Action Action `protobuf:"varint,1,opt,name=action,proto3,enum=nodesale.Action" json:"action,omitempty"`
Deploy *ActionDeploy `protobuf:"bytes,2,opt,name=deploy,proto3,oneof" json:"deploy,omitempty"`
Purchase *ActionPurchase `protobuf:"bytes,3,opt,name=purchase,proto3,oneof" json:"purchase,omitempty"`
Delegate *ActionDelegate `protobuf:"bytes,4,opt,name=delegate,proto3,oneof" json:"delegate,omitempty"`
}
func (x *NodeSaleEvent) Reset() {
*x = NodeSaleEvent{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *NodeSaleEvent) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*NodeSaleEvent) ProtoMessage() {}
func (x *NodeSaleEvent) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use NodeSaleEvent.ProtoReflect.Descriptor instead.
func (*NodeSaleEvent) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{0}
}
func (x *NodeSaleEvent) GetAction() Action {
if x != nil {
return x.Action
}
return Action_ACTION_DEPLOY
}
func (x *NodeSaleEvent) GetDeploy() *ActionDeploy {
if x != nil {
return x.Deploy
}
return nil
}
func (x *NodeSaleEvent) GetPurchase() *ActionPurchase {
if x != nil {
return x.Purchase
}
return nil
}
func (x *NodeSaleEvent) GetDelegate() *ActionDelegate {
if x != nil {
return x.Delegate
}
return nil
}
type ActionDeploy struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
StartsAt uint32 `protobuf:"varint,2,opt,name=startsAt,proto3" json:"startsAt,omitempty"`
EndsAt uint32 `protobuf:"varint,3,opt,name=endsAt,proto3" json:"endsAt,omitempty"`
Tiers []*Tier `protobuf:"bytes,4,rep,name=tiers,proto3" json:"tiers,omitempty"`
SellerPublicKey string `protobuf:"bytes,5,opt,name=sellerPublicKey,proto3" json:"sellerPublicKey,omitempty"`
MaxPerAddress uint32 `protobuf:"varint,6,opt,name=maxPerAddress,proto3" json:"maxPerAddress,omitempty"`
MaxDiscountPercentage uint32 `protobuf:"varint,7,opt,name=maxDiscountPercentage,proto3" json:"maxDiscountPercentage,omitempty"`
SellerWallet string `protobuf:"bytes,8,opt,name=sellerWallet,proto3" json:"sellerWallet,omitempty"`
}
func (x *ActionDeploy) Reset() {
*x = ActionDeploy{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ActionDeploy) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ActionDeploy) ProtoMessage() {}
func (x *ActionDeploy) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ActionDeploy.ProtoReflect.Descriptor instead.
func (*ActionDeploy) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{1}
}
func (x *ActionDeploy) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *ActionDeploy) GetStartsAt() uint32 {
if x != nil {
return x.StartsAt
}
return 0
}
func (x *ActionDeploy) GetEndsAt() uint32 {
if x != nil {
return x.EndsAt
}
return 0
}
func (x *ActionDeploy) GetTiers() []*Tier {
if x != nil {
return x.Tiers
}
return nil
}
func (x *ActionDeploy) GetSellerPublicKey() string {
if x != nil {
return x.SellerPublicKey
}
return ""
}
func (x *ActionDeploy) GetMaxPerAddress() uint32 {
if x != nil {
return x.MaxPerAddress
}
return 0
}
func (x *ActionDeploy) GetMaxDiscountPercentage() uint32 {
if x != nil {
return x.MaxDiscountPercentage
}
return 0
}
func (x *ActionDeploy) GetSellerWallet() string {
if x != nil {
return x.SellerWallet
}
return ""
}
type Tier struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
PriceSat uint32 `protobuf:"varint,1,opt,name=priceSat,proto3" json:"priceSat,omitempty"`
Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"`
MaxPerAddress uint32 `protobuf:"varint,3,opt,name=maxPerAddress,proto3" json:"maxPerAddress,omitempty"`
}
func (x *Tier) Reset() {
*x = Tier{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Tier) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Tier) ProtoMessage() {}
func (x *Tier) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Tier.ProtoReflect.Descriptor instead.
func (*Tier) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{2}
}
func (x *Tier) GetPriceSat() uint32 {
if x != nil {
return x.PriceSat
}
return 0
}
func (x *Tier) GetLimit() uint32 {
if x != nil {
return x.Limit
}
return 0
}
func (x *Tier) GetMaxPerAddress() uint32 {
if x != nil {
return x.MaxPerAddress
}
return 0
}
type ActionPurchase struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Payload *PurchasePayload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"`
SellerSignature string `protobuf:"bytes,2,opt,name=sellerSignature,proto3" json:"sellerSignature,omitempty"`
}
func (x *ActionPurchase) Reset() {
*x = ActionPurchase{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ActionPurchase) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ActionPurchase) ProtoMessage() {}
func (x *ActionPurchase) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ActionPurchase.ProtoReflect.Descriptor instead.
func (*ActionPurchase) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{3}
}
func (x *ActionPurchase) GetPayload() *PurchasePayload {
if x != nil {
return x.Payload
}
return nil
}
func (x *ActionPurchase) GetSellerSignature() string {
if x != nil {
return x.SellerSignature
}
return ""
}
type PurchasePayload struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
DeployID *ActionID `protobuf:"bytes,1,opt,name=deployID,proto3" json:"deployID,omitempty"`
BuyerPublicKey string `protobuf:"bytes,2,opt,name=buyerPublicKey,proto3" json:"buyerPublicKey,omitempty"`
NodeIDs []uint32 `protobuf:"varint,3,rep,packed,name=nodeIDs,proto3" json:"nodeIDs,omitempty"`
TotalAmountSat int64 `protobuf:"varint,4,opt,name=totalAmountSat,proto3" json:"totalAmountSat,omitempty"`
TimeOutBlock uint64 `protobuf:"varint,5,opt,name=timeOutBlock,proto3" json:"timeOutBlock,omitempty"`
}
func (x *PurchasePayload) Reset() {
*x = PurchasePayload{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *PurchasePayload) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*PurchasePayload) ProtoMessage() {}
func (x *PurchasePayload) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use PurchasePayload.ProtoReflect.Descriptor instead.
func (*PurchasePayload) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{4}
}
func (x *PurchasePayload) GetDeployID() *ActionID {
if x != nil {
return x.DeployID
}
return nil
}
func (x *PurchasePayload) GetBuyerPublicKey() string {
if x != nil {
return x.BuyerPublicKey
}
return ""
}
func (x *PurchasePayload) GetNodeIDs() []uint32 {
if x != nil {
return x.NodeIDs
}
return nil
}
func (x *PurchasePayload) GetTotalAmountSat() int64 {
if x != nil {
return x.TotalAmountSat
}
return 0
}
func (x *PurchasePayload) GetTimeOutBlock() uint64 {
if x != nil {
return x.TimeOutBlock
}
return 0
}
type ActionID struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Block uint64 `protobuf:"varint,1,opt,name=block,proto3" json:"block,omitempty"`
TxIndex uint32 `protobuf:"varint,2,opt,name=txIndex,proto3" json:"txIndex,omitempty"`
}
func (x *ActionID) Reset() {
*x = ActionID{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ActionID) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ActionID) ProtoMessage() {}
func (x *ActionID) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ActionID.ProtoReflect.Descriptor instead.
func (*ActionID) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{5}
}
func (x *ActionID) GetBlock() uint64 {
if x != nil {
return x.Block
}
return 0
}
func (x *ActionID) GetTxIndex() uint32 {
if x != nil {
return x.TxIndex
}
return 0
}
type ActionDelegate struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
DelegateePublicKey string `protobuf:"bytes,1,opt,name=delegateePublicKey,proto3" json:"delegateePublicKey,omitempty"`
NodeIDs []uint32 `protobuf:"varint,2,rep,packed,name=nodeIDs,proto3" json:"nodeIDs,omitempty"`
DeployID *ActionID `protobuf:"bytes,3,opt,name=deployID,proto3" json:"deployID,omitempty"`
}
func (x *ActionDelegate) Reset() {
*x = ActionDelegate{}
if protoimpl.UnsafeEnabled {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ActionDelegate) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ActionDelegate) ProtoMessage() {}
func (x *ActionDelegate) ProtoReflect() protoreflect.Message {
mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ActionDelegate.ProtoReflect.Descriptor instead.
func (*ActionDelegate) Descriptor() ([]byte, []int) {
return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{6}
}
func (x *ActionDelegate) GetDelegateePublicKey() string {
if x != nil {
return x.DelegateePublicKey
}
return ""
}
func (x *ActionDelegate) GetNodeIDs() []uint32 {
if x != nil {
return x.NodeIDs
}
return nil
}
func (x *ActionDelegate) GetDeployID() *ActionID {
if x != nil {
return x.DeployID
}
return nil
}
var File_modules_nodesale_protobuf_nodesale_proto protoreflect.FileDescriptor
var file_modules_nodesale_protobuf_nodesale_proto_rawDesc = []byte{
0x0a, 0x28, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x2f, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61,
0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x6e, 0x6f, 0x64, 0x65,
0x73, 0x61, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x08, 0x6e, 0x6f, 0x64, 0x65,
0x73, 0x61, 0x6c, 0x65, 0x22, 0x89, 0x02, 0x0a, 0x0d, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x61, 0x6c,
0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e,
0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x10, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c,
0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e,
0x12, 0x33, 0x0a, 0x06, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x16, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69,
0x6f, 0x6e, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x48, 0x00, 0x52, 0x06, 0x64, 0x65, 0x70, 0x6c,
0x6f, 0x79, 0x88, 0x01, 0x01, 0x12, 0x39, 0x0a, 0x08, 0x70, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73,
0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61,
0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73,
0x65, 0x48, 0x01, 0x52, 0x08, 0x70, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, 0x88, 0x01, 0x01,
0x12, 0x39, 0x0a, 0x08, 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x41, 0x63,
0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x48, 0x02, 0x52, 0x08,
0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f,
0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x75, 0x72, 0x63, 0x68,
0x61, 0x73, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65,
0x22, 0xa6, 0x02, 0x0a, 0x0c, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x70, 0x6c, 0x6f,
0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x41,
0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x41,
0x74, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x6e, 0x64, 0x73, 0x41, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28,
0x0d, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x73, 0x41, 0x74, 0x12, 0x24, 0x0a, 0x05, 0x74, 0x69, 0x65,
0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73,
0x61, 0x6c, 0x65, 0x2e, 0x54, 0x69, 0x65, 0x72, 0x52, 0x05, 0x74, 0x69, 0x65, 0x72, 0x73, 0x12,
0x28, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b,
0x65, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72,
0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x24, 0x0a, 0x0d, 0x6d, 0x61, 0x78,
0x50, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d,
0x52, 0x0d, 0x6d, 0x61, 0x78, 0x50, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12,
0x34, 0x0a, 0x15, 0x6d, 0x61, 0x78, 0x44, 0x69, 0x73, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x65,
0x72, 0x63, 0x65, 0x6e, 0x74, 0x61, 0x67, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x15,
0x6d, 0x61, 0x78, 0x44, 0x69, 0x73, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x65, 0x72, 0x63, 0x65,
0x6e, 0x74, 0x61, 0x67, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72, 0x57,
0x61, 0x6c, 0x6c, 0x65, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x65, 0x6c,
0x6c, 0x65, 0x72, 0x57, 0x61, 0x6c, 0x6c, 0x65, 0x74, 0x22, 0x5e, 0x0a, 0x04, 0x54, 0x69, 0x65,
0x72, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x63, 0x65, 0x53, 0x61, 0x74, 0x18, 0x01, 0x20,
0x01, 0x28, 0x0d, 0x52, 0x08, 0x70, 0x72, 0x69, 0x63, 0x65, 0x53, 0x61, 0x74, 0x12, 0x14, 0x0a,
0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x6c, 0x69,
0x6d, 0x69, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x6d, 0x61, 0x78, 0x50, 0x65, 0x72, 0x41, 0x64, 0x64,
0x72, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x6d, 0x61, 0x78, 0x50,
0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x22, 0x6f, 0x0a, 0x0e, 0x41, 0x63, 0x74,
0x69, 0x6f, 0x6e, 0x50, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x07, 0x70,
0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6e,
0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x50, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65,
0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64,
0x12, 0x28, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74,
0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65,
0x72, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xcf, 0x01, 0x0a, 0x0f, 0x50,
0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x2e,
0x0a, 0x08, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x12, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69,
0x6f, 0x6e, 0x49, 0x44, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x49, 0x44, 0x12, 0x26,
0x0a, 0x0e, 0x62, 0x75, 0x79, 0x65, 0x72, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79,
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x62, 0x75, 0x79, 0x65, 0x72, 0x50, 0x75, 0x62,
0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44,
0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0d, 0x52, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44, 0x73,
0x12, 0x26, 0x0a, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x53,
0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41,
0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x53, 0x61, 0x74, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65,
0x4f, 0x75, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c,
0x74, 0x69, 0x6d, 0x65, 0x4f, 0x75, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x3a, 0x0a, 0x08,
0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63,
0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x18,
0x0a, 0x07, 0x74, 0x78, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52,
0x07, 0x74, 0x78, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x8a, 0x01, 0x0a, 0x0e, 0x41, 0x63, 0x74,
0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2e, 0x0a, 0x12, 0x64,
0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65,
0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74,
0x65, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6e,
0x6f, 0x64, 0x65, 0x49, 0x44, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0d, 0x52, 0x07, 0x6e, 0x6f,
0x64, 0x65, 0x49, 0x44, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x49,
0x44, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61,
0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x44, 0x52, 0x08, 0x64, 0x65, 0x70,
0x6c, 0x6f, 0x79, 0x49, 0x44, 0x2a, 0x45, 0x0a, 0x06, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12,
0x11, 0x0a, 0x0d, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x44, 0x45, 0x50, 0x4c, 0x4f, 0x59,
0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x50, 0x55, 0x52,
0x43, 0x48, 0x41, 0x53, 0x45, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x41, 0x43, 0x54, 0x49, 0x4f,
0x4e, 0x5f, 0x44, 0x45, 0x4c, 0x45, 0x47, 0x41, 0x54, 0x45, 0x10, 0x02, 0x42, 0x43, 0x5a, 0x41,
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x61, 0x7a, 0x65, 0x2d,
0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x2f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x72, 0x2d,
0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x2f,
0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,
0x66, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_modules_nodesale_protobuf_nodesale_proto_rawDescOnce sync.Once
file_modules_nodesale_protobuf_nodesale_proto_rawDescData = file_modules_nodesale_protobuf_nodesale_proto_rawDesc
)
func file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP() []byte {
file_modules_nodesale_protobuf_nodesale_proto_rawDescOnce.Do(func() {
file_modules_nodesale_protobuf_nodesale_proto_rawDescData = protoimpl.X.CompressGZIP(file_modules_nodesale_protobuf_nodesale_proto_rawDescData)
})
return file_modules_nodesale_protobuf_nodesale_proto_rawDescData
}
var file_modules_nodesale_protobuf_nodesale_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_modules_nodesale_protobuf_nodesale_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
var file_modules_nodesale_protobuf_nodesale_proto_goTypes = []interface{}{
(Action)(0), // 0: nodesale.Action
(*NodeSaleEvent)(nil), // 1: nodesale.NodeSaleEvent
(*ActionDeploy)(nil), // 2: nodesale.ActionDeploy
(*Tier)(nil), // 3: nodesale.Tier
(*ActionPurchase)(nil), // 4: nodesale.ActionPurchase
(*PurchasePayload)(nil), // 5: nodesale.PurchasePayload
(*ActionID)(nil), // 6: nodesale.ActionID
(*ActionDelegate)(nil), // 7: nodesale.ActionDelegate
}
var file_modules_nodesale_protobuf_nodesale_proto_depIdxs = []int32{
0, // 0: nodesale.NodeSaleEvent.action:type_name -> nodesale.Action
2, // 1: nodesale.NodeSaleEvent.deploy:type_name -> nodesale.ActionDeploy
4, // 2: nodesale.NodeSaleEvent.purchase:type_name -> nodesale.ActionPurchase
7, // 3: nodesale.NodeSaleEvent.delegate:type_name -> nodesale.ActionDelegate
3, // 4: nodesale.ActionDeploy.tiers:type_name -> nodesale.Tier
5, // 5: nodesale.ActionPurchase.payload:type_name -> nodesale.PurchasePayload
6, // 6: nodesale.PurchasePayload.deployID:type_name -> nodesale.ActionID
6, // 7: nodesale.ActionDelegate.deployID:type_name -> nodesale.ActionID
8, // [8:8] is the sub-list for method output_type
8, // [8:8] is the sub-list for method input_type
8, // [8:8] is the sub-list for extension type_name
8, // [8:8] is the sub-list for extension extendee
0, // [0:8] is the sub-list for field type_name
}
func init() { file_modules_nodesale_protobuf_nodesale_proto_init() }
func file_modules_nodesale_protobuf_nodesale_proto_init() {
if File_modules_nodesale_protobuf_nodesale_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*NodeSaleEvent); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ActionDeploy); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Tier); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ActionPurchase); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*PurchasePayload); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ActionID); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ActionDelegate); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0].OneofWrappers = []interface{}{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_modules_nodesale_protobuf_nodesale_proto_rawDesc,
NumEnums: 1,
NumMessages: 7,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_modules_nodesale_protobuf_nodesale_proto_goTypes,
DependencyIndexes: file_modules_nodesale_protobuf_nodesale_proto_depIdxs,
EnumInfos: file_modules_nodesale_protobuf_nodesale_proto_enumTypes,
MessageInfos: file_modules_nodesale_protobuf_nodesale_proto_msgTypes,
}.Build()
File_modules_nodesale_protobuf_nodesale_proto = out.File
file_modules_nodesale_protobuf_nodesale_proto_rawDesc = nil
file_modules_nodesale_protobuf_nodesale_proto_goTypes = nil
file_modules_nodesale_protobuf_nodesale_proto_depIdxs = nil
}


@@ -0,0 +1,60 @@
syntax = "proto3";
// protoc modules/nodesale/protobuf/nodesale.proto --go_out=. --go_opt=module=github.com/gaze-network/indexer-network
package nodesale;
option go_package = "github.com/gaze-network/indexer-network/modules/nodesale/protobuf";
enum Action {
ACTION_DEPLOY = 0;
ACTION_PURCHASE = 1;
ACTION_DELEGATE = 2;
}
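// NodeSaleEvent is the payload embedded in a taproot witness envelope; action
// selects which one of deploy, purchase, or delegate is populated.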
message NodeSaleEvent {
Action action = 1;
optional ActionDeploy deploy = 2;
optional ActionPurchase purchase = 3;
optional ActionDelegate delegate = 4;
}
message ActionDeploy {
string name = 1;
uint32 startsAt = 2;
uint32 endsAt = 3;
repeated Tier tiers = 4;
string sellerPublicKey = 5;
uint32 maxPerAddress = 6;
uint32 maxDiscountPercentage = 7;
string sellerWallet = 8;
}
message Tier {
uint32 priceSat = 1;
uint32 limit = 2;
uint32 maxPerAddress = 3;
}
message ActionPurchase {
PurchasePayload payload = 1;
string sellerSignature = 2;
}
message PurchasePayload {
ActionID deployID = 1;
string buyerPublicKey = 2;
repeated uint32 nodeIDs = 3;
int64 totalAmountSat = 4;
uint64 timeOutBlock = 5;
}
message ActionID {
uint64 block = 1;
uint32 txIndex = 2;
}
message ActionDelegate {
string delegateePublicKey = 1;
repeated uint32 nodeIDs = 2;
ActionID deployID = 3;
}


@@ -0,0 +1,12 @@
package nodesale
import (
"github.com/btcsuite/btcd/btcec/v2"
"github.com/btcsuite/btcd/btcutil"
)
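// PubkeyToPkHashAddress converts a compressed public key into its
// pay-to-pubkey-hash (P2PKH) address on the processor's configured network.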
func (p *Processor) PubkeyToPkHashAddress(pubKey *btcec.PublicKey) btcutil.Address {
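// NewAddressPubKey cannot fail here: the bytes come from serializing a valid *btcec.PublicKey.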
addrPubKey, _ := btcutil.NewAddressPubKey(pubKey.SerializeCompressed(), p.Network.ChainParams())
addrPubKeyHash := addrPubKey.AddressPubKeyHash()
return addrPubKeyHash
}


@@ -0,0 +1,87 @@
package nodesale
import (
"context"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/core/types"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
purchasevalidator "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator/purchase"
)
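// ProcessPurchase validates a purchase event against its referenced deployment
// (buyer key, sale window, timeout block, seller signature, tier limits, and
// paid amount), records the event, and creates node rows when the event is valid.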
func (p *Processor) ProcessPurchase(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, block *types.Block, event NodeSaleEvent) error {
purchase := event.EventMessage.Purchase
payload := purchase.Payload
validator := purchasevalidator.New()
validator.EqualXonlyPublicKey(payload.BuyerPublicKey, event.TxPubkey)
_, deploy, err := validator.NodeSaleExists(ctx, qtx, payload)
if err != nil {
return errors.Wrap(err, "cannot query. Something wrong.")
}
validator.ValidTimestamp(deploy, block.Header.Timestamp)
validator.WithinTimeoutBlock(payload.TimeOutBlock, uint64(event.Transaction.BlockHeight))
validator.VerifySignature(purchase, deploy)
_, tierMap := validator.ValidTiers(payload, deploy)
tiers := tierMap.Tiers
buyingTiersCount := tierMap.BuyingTiersCount
nodeIdToTier := tierMap.NodeIdToTier
_, err = validator.ValidUnpurchasedNodes(ctx, qtx, payload)
if err != nil {
return errors.Wrap(err, "cannot query. Something wrong.")
}
_, meta := validator.ValidPaidAmount(payload, deploy, event.InputValue, tiers, buyingTiersCount, p.Network.ChainParams())
_, err = validator.WithinLimit(ctx, qtx, payload, deploy, tiers, buyingTiersCount)
if err != nil {
return errors.Wrap(err, "cannot query. Something wrong.")
}
err = qtx.CreateEvent(ctx, entity.NodeSaleEvent{
TxHash: event.Transaction.TxHash.String(),
TxIndex: int32(event.Transaction.Index),
Action: int32(event.EventMessage.Action),
RawMessage: event.RawData,
ParsedMessage: event.EventJson,
BlockTimestamp: block.Header.Timestamp,
BlockHash: event.Transaction.BlockHash.String(),
BlockHeight: event.Transaction.BlockHeight,
Valid: validator.Valid,
WalletAddress: p.PubkeyToPkHashAddress(event.TxPubkey).EncodeAddress(),
Metadata: meta,
Reason: validator.Reason,
})
if err != nil {
return errors.Wrap(err, "Failed to insert event")
}
if validator.Valid {
// add to node
for _, nodeId := range payload.NodeIDs {
err := qtx.CreateNode(ctx, entity.Node{
SaleBlock: deploy.BlockHeight,
SaleTxIndex: deploy.TxIndex,
NodeID: nodeId,
TierIndex: nodeIdToTier[nodeId],
DelegatedTo: "",
OwnerPublicKey: payload.BuyerPublicKey,
PurchaseTxHash: event.Transaction.TxHash.String(),
DelegateTxHash: "",
})
if err != nil {
return errors.Wrap(err, "Failed to insert node")
}
}
}
return nil
}


@@ -0,0 +1,902 @@
package nodesale
import (
"context"
"encoding/hex"
"testing"
"time"
"github.com/btcsuite/btcd/btcec/v2"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/decred/dcrd/dcrec/secp256k1/v4/ecdsa"
"github.com/gaze-network/indexer-network/common"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/validator"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/validator/purchase"
"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
"github.com/samber/lo"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/proto"
)
func TestInvalidPurchase(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
buyerPrivateKey, err := btcec.NewPrivateKey()
require.NoError(t, err)
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 111,
TxIndex: 1,
},
NodeIDs: []uint32{1, 2},
BuyerPublicKey: buyerPubkeyHex,
TotalAmountSat: 500,
TimeOutBlock: uint64(testBlockHeight) + 5,
},
},
}
event, block := assembleTestEvent(buyerPrivateKey, "030303030303", "030303030303", 0, 0, message)
mockDgTx.EXPECT().GetNodeSale(mock.Anything, mock.Anything).Return(nil, nil)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false
})).Return(nil)
err = p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNode")
}
func TestInvalidBuyerKey(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
strangerPrivateKey, _ := btcec.NewPrivateKey()
strangerPrivateKeyHex := hex.EncodeToString(strangerPrivateKey.PubKey().SerializeCompressed())
buyerPrivateKey, _ := btcec.NewPrivateKey()
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
NodeIDs: []uint32{1, 2},
BuyerPublicKey: strangerPrivateKeyHex,
TotalAmountSat: 200,
TimeOutBlock: uint64(testBlockHeight) + 5,
},
},
}
event, block := assembleTestEvent(buyerPrivateKey, "0707070707", "0707070707", 0, 0, message)
block.Header.Timestamp = time.Now().UTC()
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == validator.INVALID_PUBKEY
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNode")
}
func TestInvalidTimestamp(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, err := btcec.NewPrivateKey()
require.NoError(t, err)
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 5,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
NodeIDs: []uint32{1, 2},
BuyerPublicKey: buyerPubkeyHex,
TotalAmountSat: 200,
TimeOutBlock: uint64(testBlockHeight) + 5,
},
},
}
event, block := assembleTestEvent(buyerPrivateKey, "050505050505", "050505050505", 0, 0, message)
block.Header.Timestamp = time.Now().UTC().Add(time.Hour * 2)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.PURCHASE_TIMEOUT
})).Return(nil)
err = p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNode")
}
func TestTimeOut(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 5,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
NodeIDs: []uint32{1, 2},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) - 5,
TotalAmountSat: 200,
},
},
}
event, block := assembleTestEvent(buyerPrivateKey, "090909090909", "090909090909", 0, 0, message)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.BLOCK_HEIGHT_TIMEOUT
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNode")
}
func TestSignatureInvalid(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 5,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
payload := &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
NodeIDs: []uint32{1, 2},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) + 5,
}
payloadBytes, _ := proto.Marshal(payload)
payloadHash := chainhash.DoubleHashB(payloadBytes)
signature := ecdsa.Sign(buyerPrivateKey, payloadHash[:])
signatureHex := hex.EncodeToString(signature.Serialize())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: payload,
SellerSignature: signatureHex,
},
}
event, block := assembleTestEvent(buyerPrivateKey, "0B0B0B", "0B0B0B", 0, 0, message)
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.INVALID_SIGNATURE
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNode")
}
func TestValidPurchase(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 4,
MaxPerAddress: 2,
},
{
PriceSat: 400,
Limit: 3,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil)
mockDgTx.EXPECT().GetNodesByOwner(mock.Anything, mock.Anything).Return(nil, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
payload := &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) + 5,
NodeIDs: []uint32{0, 5, 6, 9},
TotalAmountSat: 500,
}
payloadBytes, _ := proto.Marshal(payload)
payloadHash := chainhash.DoubleHashB(payloadBytes)
signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:])
signatureHex := hex.EncodeToString(signature.Serialize())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: payload,
SellerSignature: signatureHex,
},
}
event, block := assembleTestEvent(buyerPrivateKey, "0D0D0D0D", "0D0D0D0D", 0, 0, message)
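// Nodes {0, 5, 6, 9} fall into tiers 0, 1, 1 and 2 (full price 100+200+200+400 = 900 sat);
// paying 500 sat is accepted because it stays within the 50% max discount and the input value covers it.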
event.InputValue = 500
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == true && event.Reason == ""
})).Return(nil)
mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool {
return node.NodeID == 0 &&
node.TierIndex == 0 &&
node.OwnerPublicKey == buyerPubkeyHex &&
node.PurchaseTxHash == event.Transaction.TxHash.String() &&
node.SaleBlock == 100 &&
node.SaleTxIndex == 1
})).Return(nil)
mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool {
return node.NodeID == 5 &&
node.TierIndex == 1 &&
node.OwnerPublicKey == buyerPubkeyHex &&
node.PurchaseTxHash == event.Transaction.TxHash.String() &&
node.SaleBlock == 100 &&
node.SaleTxIndex == 1
})).Return(nil)
mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool {
return node.NodeID == 6 &&
node.TierIndex == 1 &&
node.OwnerPublicKey == buyerPubkeyHex &&
node.PurchaseTxHash == event.Transaction.TxHash.String() &&
node.SaleBlock == 100 &&
node.SaleTxIndex == 1
})).Return(nil)
mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool {
return node.NodeID == 9 &&
node.TierIndex == 2 &&
node.OwnerPublicKey == buyerPubkeyHex &&
node.PurchaseTxHash == event.Transaction.TxHash.String() &&
node.SaleBlock == 100 &&
node.SaleTxIndex == 1
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
}
func TestMismatchPayment(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 4,
MaxPerAddress: 2,
},
{
PriceSat: 400,
Limit: 3,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
payload := &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) + 5,
NodeIDs: []uint32{0, 5, 6, 9},
TotalAmountSat: 500,
}
payloadBytes, _ := proto.Marshal(payload)
payloadHash := chainhash.DoubleHashB(payloadBytes)
signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:])
signatureHex := hex.EncodeToString(signature.Serialize())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: payload,
SellerSignature: signatureHex,
},
}
event, block := assembleTestEvent(buyerPrivateKey, "0D0D0D0D", "0D0D0D0D", 0, 0, message)
event.InputValue = 400
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.INVALID_PAYMENT
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
}
func TestInsufficientFund(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 4,
MaxPerAddress: 2,
},
{
PriceSat: 400,
Limit: 3,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
payload := &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) + 5,
NodeIDs: []uint32{0, 5, 6, 9},
TotalAmountSat: 200,
}
payloadBytes, _ := proto.Marshal(payload)
payloadHash := chainhash.DoubleHashB(payloadBytes)
signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:])
signatureHex := hex.EncodeToString(signature.Serialize())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: payload,
SellerSignature: signatureHex,
},
}
event, block := assembleTestEvent(buyerPrivateKey, "0D0D0D0D", "0D0D0D0D", 0, 0, message)
event.InputValue = 200
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.INSUFFICIENT_FUND
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
}
func TestBuyingLimit(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 4,
MaxPerAddress: 2,
},
{
PriceSat: 400,
Limit: 50,
MaxPerAddress: 100,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 2,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil)
mockDgTx.EXPECT().GetNodesByOwner(mock.Anything, datagateway.GetNodesByOwnerParams{
SaleBlock: 100,
SaleTxIndex: 1,
OwnerPublicKey: buyerPubkeyHex,
}).Return([]entity.Node{
{
SaleBlock: 100,
SaleTxIndex: 1,
NodeID: 9,
TierIndex: 2,
OwnerPublicKey: buyerPubkeyHex,
},
{
SaleBlock: 100,
SaleTxIndex: 1,
NodeID: 10,
TierIndex: 2,
OwnerPublicKey: buyerPubkeyHex,
},
}, nil)
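// The buyer already owns two nodes and the sale-wide MaxPerAddress is 2,
// so purchasing one more node must be rejected.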
payload := &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) + 5,
NodeIDs: []uint32{11},
TotalAmountSat: 600,
}
payloadBytes, _ := proto.Marshal(payload)
payloadHash := chainhash.DoubleHashB(payloadBytes)
signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:])
signatureHex := hex.EncodeToString(signature.Serialize())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: payload,
SellerSignature: signatureHex,
},
}
event, block := assembleTestEvent(buyerPrivateKey, "22222222", "22222222", 0, 0, message)
event.InputValue = 600
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.OVER_LIMIT_PER_ADDR
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
mockDgTx.AssertNotCalled(t, "CreateNode")
}
func TestBuyingTierLimit(t *testing.T) {
ctx := context.Background()
mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t)
p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0)
sellerPrivateKey, _ := btcec.NewPrivateKey()
sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed())
sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey())
startAt := time.Now().Add(time.Hour * -1)
endAt := time.Now().Add(time.Hour * 1)
tiers := lo.Map([]*protobuf.Tier{
{
PriceSat: 100,
Limit: 5,
MaxPerAddress: 100,
},
{
PriceSat: 200,
Limit: 4,
MaxPerAddress: 2,
},
{
PriceSat: 400,
Limit: 50,
MaxPerAddress: 3,
},
}, func(tier *protobuf.Tier, _ int) []byte {
tierJson, err := protojson.Marshal(tier)
require.NoError(t, err)
return tierJson
})
mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{
BlockHeight: 100,
TxIndex: 1,
}).Return([]entity.NodeSale{
{
BlockHeight: 100,
TxIndex: 1,
Name: t.Name(),
StartsAt: startAt,
EndsAt: endAt,
Tiers: tiers,
SellerPublicKey: sellerPubkeyHex,
MaxPerAddress: 100,
DeployTxHash: "040404040404",
MaxDiscountPercentage: 50,
SellerWallet: sellerWallet.EncodeAddress(),
},
}, nil)
buyerPrivateKey, _ := btcec.NewPrivateKey()
buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed())
mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil)
mockDgTx.EXPECT().GetNodesByOwner(mock.Anything, datagateway.GetNodesByOwnerParams{
SaleBlock: 100,
SaleTxIndex: 1,
OwnerPublicKey: buyerPubkeyHex,
}).Return([]entity.Node{
{
SaleBlock: 100,
SaleTxIndex: 1,
NodeID: 9,
TierIndex: 2,
OwnerPublicKey: buyerPubkeyHex,
},
{
SaleBlock: 100,
SaleTxIndex: 1,
NodeID: 10,
TierIndex: 2,
OwnerPublicKey: buyerPubkeyHex,
},
{
SaleBlock: 100,
SaleTxIndex: 1,
NodeID: 11,
TierIndex: 2,
OwnerPublicKey: buyerPubkeyHex,
},
}, nil)
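// The buyer already holds three tier-2 nodes, matching that tier's MaxPerAddress of 3,
// so buying nodes 12-14 (also tier 2) exceeds the per-tier limit.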
payload := &protobuf.PurchasePayload{
DeployID: &protobuf.ActionID{
Block: 100,
TxIndex: 1,
},
BuyerPublicKey: buyerPubkeyHex,
TimeOutBlock: uint64(testBlockHeight) + 5,
NodeIDs: []uint32{12, 13, 14},
TotalAmountSat: 600,
}
payloadBytes, _ := proto.Marshal(payload)
payloadHash := chainhash.DoubleHashB(payloadBytes)
signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:])
signatureHex := hex.EncodeToString(signature.Serialize())
message := &protobuf.NodeSaleEvent{
Action: protobuf.Action_ACTION_PURCHASE,
Purchase: &protobuf.ActionPurchase{
Payload: payload,
SellerSignature: signatureHex,
},
}
event, block := assembleTestEvent(buyerPrivateKey, "10101010", "10101010", 0, 0, message)
event.InputValue = 600
mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool {
return event.Valid == false && event.Reason == purchase.OVER_LIMIT_PER_TIER
})).Return(nil)
err := p.ProcessPurchase(ctx, mockDgTx, block, event)
require.NoError(t, err)
}

View File

@@ -0,0 +1,62 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
// source: blocks.sql
package gen
import (
"context"
)
const createBlock = `-- name: CreateBlock :exec
INSERT INTO blocks ("block_height", "block_hash", "module")
VALUES ($1, $2, $3)
`
type CreateBlockParams struct {
BlockHeight int64
BlockHash string
Module string
}
func (q *Queries) CreateBlock(ctx context.Context, arg CreateBlockParams) error {
_, err := q.db.Exec(ctx, createBlock, arg.BlockHeight, arg.BlockHash, arg.Module)
return err
}
const getBlock = `-- name: GetBlock :one
SELECT block_height, block_hash, module FROM blocks
WHERE "block_height" = $1
`
func (q *Queries) GetBlock(ctx context.Context, blockHeight int64) (Block, error) {
row := q.db.QueryRow(ctx, getBlock, blockHeight)
var i Block
err := row.Scan(&i.BlockHeight, &i.BlockHash, &i.Module)
return i, err
}
const getLastProcessedBlock = `-- name: GetLastProcessedBlock :one
SELECT block_height, block_hash, module FROM blocks ORDER BY block_height DESC LIMIT 1
`
func (q *Queries) GetLastProcessedBlock(ctx context.Context) (Block, error) {
row := q.db.QueryRow(ctx, getLastProcessedBlock)
var i Block
err := row.Scan(&i.BlockHeight, &i.BlockHash, &i.Module)
return i, err
}
const removeBlockFrom = `-- name: RemoveBlockFrom :execrows
DELETE FROM blocks
WHERE "block_height" >= $1
`
func (q *Queries) RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error) {
result, err := q.db.Exec(ctx, removeBlockFrom, fromBlock)
if err != nil {
return 0, err
}
return result.RowsAffected(), nil
}

View File

@@ -0,0 +1,32 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
package gen
import (
"context"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgconn"
)
type DBTX interface {
Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error)
Query(context.Context, string, ...interface{}) (pgx.Rows, error)
QueryRow(context.Context, string, ...interface{}) pgx.Row
}
func New(db DBTX) *Queries {
return &Queries{db: db}
}
type Queries struct {
db DBTX
}
func (q *Queries) WithTx(tx pgx.Tx) *Queries {
return &Queries{
db: tx,
}
}
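
A minimal wiring sketch, not part of the diff: a *pgxpool.Pool satisfies the DBTX interface above, so the generated queries can run directly against a pool. The connection string and values below are illustrative.

package main

import (
	"context"

	"github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres/gen"
	"github.com/jackc/pgx/v5/pgxpool"
)

func main() {
	ctx := context.Background()
	// pgxpool.Pool implements Exec/Query/QueryRow with the signatures DBTX requires.
	pool, err := pgxpool.New(ctx, "postgres://user:pass@localhost:5432/gaze")
	if err != nil {
		panic(err)
	}
	defer pool.Close()

	q := gen.New(pool)
	if err := q.CreateBlock(ctx, gen.CreateBlockParams{
		BlockHeight: 840000,
		BlockHash:   "0000000000000000000000000000000000000000000000000000000000000000",
		Module:      "nodesale", // illustrative module name
	}); err != nil {
		panic(err)
	}
	// GetLastProcessedBlock returns pgx.ErrNoRows while the blocks table is empty.
	last, _ := q.GetLastProcessedBlock(ctx)
	_ = last
}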

View File

@@ -0,0 +1,104 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
// source: events.sql
package gen
import (
"context"
"github.com/jackc/pgx/v5/pgtype"
)
const createEvent = `-- name: CreateEvent :exec
INSERT INTO events ("tx_hash", "block_height", "tx_index", "wallet_address", "valid", "action",
"raw_message", "parsed_message", "block_timestamp", "block_hash", "metadata",
"reason")
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
`
type CreateEventParams struct {
TxHash string
BlockHeight int64
TxIndex int32
WalletAddress string
Valid bool
Action int32
RawMessage []byte
ParsedMessage []byte
BlockTimestamp pgtype.Timestamp
BlockHash string
Metadata []byte
Reason string
}
func (q *Queries) CreateEvent(ctx context.Context, arg CreateEventParams) error {
_, err := q.db.Exec(ctx, createEvent,
arg.TxHash,
arg.BlockHeight,
arg.TxIndex,
arg.WalletAddress,
arg.Valid,
arg.Action,
arg.RawMessage,
arg.ParsedMessage,
arg.BlockTimestamp,
arg.BlockHash,
arg.Metadata,
arg.Reason,
)
return err
}
const getEventsByWallet = `-- name: GetEventsByWallet :many
SELECT tx_hash, block_height, tx_index, wallet_address, valid, action, raw_message, parsed_message, block_timestamp, block_hash, metadata, reason
FROM events
WHERE wallet_address = $1
`
func (q *Queries) GetEventsByWallet(ctx context.Context, walletAddress string) ([]Event, error) {
rows, err := q.db.Query(ctx, getEventsByWallet, walletAddress)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Event
for rows.Next() {
var i Event
if err := rows.Scan(
&i.TxHash,
&i.BlockHeight,
&i.TxIndex,
&i.WalletAddress,
&i.Valid,
&i.Action,
&i.RawMessage,
&i.ParsedMessage,
&i.BlockTimestamp,
&i.BlockHash,
&i.Metadata,
&i.Reason,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const removeEventsFromBlock = `-- name: RemoveEventsFromBlock :execrows
DELETE FROM events
WHERE "block_height" >= $1
`
func (q *Queries) RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error) {
result, err := q.db.Exec(ctx, removeEventsFromBlock, fromBlock)
if err != nil {
return 0, err
}
return result.RowsAffected(), nil
}

View File

@@ -0,0 +1,55 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
package gen
import (
"github.com/jackc/pgx/v5/pgtype"
)
type Block struct {
BlockHeight int64
BlockHash string
Module string
}
type Event struct {
TxHash string
BlockHeight int64
TxIndex int32
WalletAddress string
Valid bool
Action int32
RawMessage []byte
ParsedMessage []byte
BlockTimestamp pgtype.Timestamp
BlockHash string
Metadata []byte
Reason string
}
type Node struct {
SaleBlock int64
SaleTxIndex int32
NodeID int32
TierIndex int32
DelegatedTo string
OwnerPublicKey string
PurchaseTxHash string
DelegateTxHash string
}
type NodeSale struct {
BlockHeight int64
TxIndex int32
Name string
StartsAt pgtype.Timestamp
EndsAt pgtype.Timestamp
Tiers [][]byte
SellerPublicKey string
MaxPerAddress int32
DeployTxHash string
MaxDiscountPercentage int32
SellerWallet string
}

View File

@@ -0,0 +1,271 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
// source: nodes.sql
package gen
import (
"context"
)
const clearDelegate = `-- name: ClearDelegate :execrows
UPDATE nodes
SET "delegated_to" = ''
WHERE "delegate_tx_hash" = ''
`
func (q *Queries) ClearDelegate(ctx context.Context) (int64, error) {
result, err := q.db.Exec(ctx, clearDelegate)
if err != nil {
return 0, err
}
return result.RowsAffected(), nil
}
const createNode = `-- name: CreateNode :exec
INSERT INTO nodes (sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
`
type CreateNodeParams struct {
SaleBlock int64
SaleTxIndex int32
NodeID int32
TierIndex int32
DelegatedTo string
OwnerPublicKey string
PurchaseTxHash string
DelegateTxHash string
}
func (q *Queries) CreateNode(ctx context.Context, arg CreateNodeParams) error {
_, err := q.db.Exec(ctx, createNode,
arg.SaleBlock,
arg.SaleTxIndex,
arg.NodeID,
arg.TierIndex,
arg.DelegatedTo,
arg.OwnerPublicKey,
arg.PurchaseTxHash,
arg.DelegateTxHash,
)
return err
}
const getNodeCountByTierIndex = `-- name: GetNodeCountByTierIndex :many
SELECT (tiers.tier_index)::int AS tier_index, count(nodes.tier_index)
FROM generate_series($3::int,$4::int) AS tiers(tier_index)
LEFT JOIN
(SELECT sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash
FROM nodes
WHERE sale_block = $1 AND
sale_tx_index= $2)
AS nodes ON tiers.tier_index = nodes.tier_index
GROUP BY tiers.tier_index
ORDER BY tiers.tier_index
`
type GetNodeCountByTierIndexParams struct {
SaleBlock int64
SaleTxIndex int32
FromTier int32
ToTier int32
}
type GetNodeCountByTierIndexRow struct {
TierIndex int32
Count int64
}
func (q *Queries) GetNodeCountByTierIndex(ctx context.Context, arg GetNodeCountByTierIndexParams) ([]GetNodeCountByTierIndexRow, error) {
rows, err := q.db.Query(ctx, getNodeCountByTierIndex,
arg.SaleBlock,
arg.SaleTxIndex,
arg.FromTier,
arg.ToTier,
)
if err != nil {
return nil, err
}
defer rows.Close()
var items []GetNodeCountByTierIndexRow
for rows.Next() {
var i GetNodeCountByTierIndexRow
if err := rows.Scan(&i.TierIndex, &i.Count); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
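// Illustrative note (not generated by sqlc): the generate_series LEFT JOIN in
// getNodeCountByTierIndex guarantees one row per tier index in [$3, $4], with a
// count of 0 for tiers that have no purchased nodes yet. A hypothetical call
// over tiers 0..2 of the sale deployed at (100, 1) might look like:
//
//	counts, err := q.GetNodeCountByTierIndex(ctx, GetNodeCountByTierIndexParams{
//		SaleBlock: 100, SaleTxIndex: 1, FromTier: 0, ToTier: 2,
//	})
//	// counts contains exactly three rows, one per tier, even for empty tiers.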
const getNodesByIds = `-- name: GetNodesByIds :many
SELECT sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash
FROM nodes
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
node_id = ANY ($3::int[])
`
type GetNodesByIdsParams struct {
SaleBlock int64
SaleTxIndex int32
NodeIds []int32
}
func (q *Queries) GetNodesByIds(ctx context.Context, arg GetNodesByIdsParams) ([]Node, error) {
rows, err := q.db.Query(ctx, getNodesByIds, arg.SaleBlock, arg.SaleTxIndex, arg.NodeIds)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Node
for rows.Next() {
var i Node
if err := rows.Scan(
&i.SaleBlock,
&i.SaleTxIndex,
&i.NodeID,
&i.TierIndex,
&i.DelegatedTo,
&i.OwnerPublicKey,
&i.PurchaseTxHash,
&i.DelegateTxHash,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getNodesByOwner = `-- name: GetNodesByOwner :many
SELECT sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash
FROM nodes
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
owner_public_key = $3
ORDER BY tier_index
`
type GetNodesByOwnerParams struct {
SaleBlock int64
SaleTxIndex int32
OwnerPublicKey string
}
func (q *Queries) GetNodesByOwner(ctx context.Context, arg GetNodesByOwnerParams) ([]Node, error) {
rows, err := q.db.Query(ctx, getNodesByOwner, arg.SaleBlock, arg.SaleTxIndex, arg.OwnerPublicKey)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Node
for rows.Next() {
var i Node
if err := rows.Scan(
&i.SaleBlock,
&i.SaleTxIndex,
&i.NodeID,
&i.TierIndex,
&i.DelegatedTo,
&i.OwnerPublicKey,
&i.PurchaseTxHash,
&i.DelegateTxHash,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getNodesByPubkey = `-- name: GetNodesByPubkey :many
SELECT nodes.sale_block, nodes.sale_tx_index, nodes.node_id, nodes.tier_index, nodes.delegated_to, nodes.owner_public_key, nodes.purchase_tx_hash, nodes.delegate_tx_hash
FROM nodes JOIN events ON nodes.purchase_tx_hash = events.tx_hash
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
owner_public_key = $3 AND
delegated_to = $4
`
type GetNodesByPubkeyParams struct {
SaleBlock int64
SaleTxIndex int32
OwnerPublicKey string
DelegatedTo string
}
func (q *Queries) GetNodesByPubkey(ctx context.Context, arg GetNodesByPubkeyParams) ([]Node, error) {
rows, err := q.db.Query(ctx, getNodesByPubkey,
arg.SaleBlock,
arg.SaleTxIndex,
arg.OwnerPublicKey,
arg.DelegatedTo,
)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Node
for rows.Next() {
var i Node
if err := rows.Scan(
&i.SaleBlock,
&i.SaleTxIndex,
&i.NodeID,
&i.TierIndex,
&i.DelegatedTo,
&i.OwnerPublicKey,
&i.PurchaseTxHash,
&i.DelegateTxHash,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const setDelegates = `-- name: SetDelegates :execrows
UPDATE nodes
SET delegated_to = $4, delegate_tx_hash = $3
WHERE sale_block = $1 AND
sale_tx_index = $2 AND
node_id = ANY ($5::int[])
`
type SetDelegatesParams struct {
SaleBlock int64
SaleTxIndex int32
DelegateTxHash string
Delegatee string
NodeIds []int32
}
func (q *Queries) SetDelegates(ctx context.Context, arg SetDelegatesParams) (int64, error) {
result, err := q.db.Exec(ctx, setDelegates,
arg.SaleBlock,
arg.SaleTxIndex,
arg.DelegateTxHash,
arg.Delegatee,
arg.NodeIds,
)
if err != nil {
return 0, err
}
return result.RowsAffected(), nil
}

View File

@@ -0,0 +1,92 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
// source: nodesales.sql
package gen
import (
"context"
"github.com/jackc/pgx/v5/pgtype"
)
const createNodeSale = `-- name: CreateNodeSale :exec
INSERT INTO node_sales ("block_height", "tx_index", "name", "starts_at", "ends_at", "tiers", "seller_public_key", "max_per_address", "deploy_tx_hash", "max_discount_percentage", "seller_wallet")
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
`
type CreateNodeSaleParams struct {
BlockHeight int64
TxIndex int32
Name string
StartsAt pgtype.Timestamp
EndsAt pgtype.Timestamp
Tiers [][]byte
SellerPublicKey string
MaxPerAddress int32
DeployTxHash string
MaxDiscountPercentage int32
SellerWallet string
}
func (q *Queries) CreateNodeSale(ctx context.Context, arg CreateNodeSaleParams) error {
_, err := q.db.Exec(ctx, createNodeSale,
arg.BlockHeight,
arg.TxIndex,
arg.Name,
arg.StartsAt,
arg.EndsAt,
arg.Tiers,
arg.SellerPublicKey,
arg.MaxPerAddress,
arg.DeployTxHash,
arg.MaxDiscountPercentage,
arg.SellerWallet,
)
return err
}
const getNodeSale = `-- name: GetNodeSale :many
SELECT block_height, tx_index, name, starts_at, ends_at, tiers, seller_public_key, max_per_address, deploy_tx_hash, max_discount_percentage, seller_wallet
FROM node_sales
WHERE block_height = $1 AND
tx_index = $2
`
type GetNodeSaleParams struct {
BlockHeight int64
TxIndex int32
}
func (q *Queries) GetNodeSale(ctx context.Context, arg GetNodeSaleParams) ([]NodeSale, error) {
rows, err := q.db.Query(ctx, getNodeSale, arg.BlockHeight, arg.TxIndex)
if err != nil {
return nil, err
}
defer rows.Close()
var items []NodeSale
for rows.Next() {
var i NodeSale
if err := rows.Scan(
&i.BlockHeight,
&i.TxIndex,
&i.Name,
&i.StartsAt,
&i.EndsAt,
&i.Tiers,
&i.SellerPublicKey,
&i.MaxPerAddress,
&i.DeployTxHash,
&i.MaxDiscountPercentage,
&i.SellerWallet,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}

View File

@@ -0,0 +1,20 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.26.0
// source: test.sql
package gen
import (
"context"
)
const clearEvents = `-- name: ClearEvents :exec
DELETE FROM events
WHERE tx_hash <> ''
`
func (q *Queries) ClearEvents(ctx context.Context) error {
_, err := q.db.Exec(ctx, clearEvents)
return err
}

View File

@@ -0,0 +1,74 @@
package postgres
import (
"encoding/json"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres/gen"
"github.com/samber/lo"
)
func mapNodes(nodes []gen.Node) []entity.Node {
return lo.Map(nodes, func(item gen.Node, index int) entity.Node {
return entity.Node{
SaleBlock: uint64(item.SaleBlock),
SaleTxIndex: uint32(item.SaleTxIndex),
NodeID: uint32(item.NodeID),
TierIndex: item.TierIndex,
DelegatedTo: item.DelegatedTo,
OwnerPublicKey: item.OwnerPublicKey,
PurchaseTxHash: item.PurchaseTxHash,
DelegateTxHash: item.DelegateTxHash,
}
})
}
func mapNodeSales(nodeSales []gen.NodeSale) []entity.NodeSale {
return lo.Map(nodeSales, func(item gen.NodeSale, index int) entity.NodeSale {
return entity.NodeSale{
BlockHeight: uint64(item.BlockHeight),
TxIndex: uint32(item.TxIndex),
Name: item.Name,
StartsAt: item.StartsAt.Time,
EndsAt: item.EndsAt.Time,
Tiers: item.Tiers,
SellerPublicKey: item.SellerPublicKey,
MaxPerAddress: uint32(item.MaxPerAddress),
DeployTxHash: item.DeployTxHash,
MaxDiscountPercentage: item.MaxDiscountPercentage,
SellerWallet: item.SellerWallet,
}
})
}
func mapNodeCountByTierIndexRows(nodeCount []gen.GetNodeCountByTierIndexRow) []datagateway.GetNodeCountByTierIndexRow {
return lo.Map(nodeCount, func(item gen.GetNodeCountByTierIndexRow, index int) datagateway.GetNodeCountByTierIndexRow {
return datagateway.GetNodeCountByTierIndexRow{
TierIndex: item.TierIndex,
}
})
}
func mapNodeSalesEvents(events []gen.Event) []entity.NodeSaleEvent {
return lo.Map(events, func(item gen.Event, index int) entity.NodeSaleEvent {
var meta entity.MetadataEventPurchase
err := json.Unmarshal(item.Metadata, &meta)
if err != nil {
meta = entity.MetadataEventPurchase{}
}
return entity.NodeSaleEvent{
TxHash: item.TxHash,
BlockHeight: item.BlockHeight,
TxIndex: item.TxIndex,
WalletAddress: item.WalletAddress,
Valid: item.Valid,
Action: item.Action,
RawMessage: item.RawMessage,
ParsedMessage: item.ParsedMessage,
BlockTimestamp: item.BlockTimestamp.Time.UTC(),
BlockHash: item.BlockHash,
Metadata: &meta,
}
})
}

View File

@@ -0,0 +1,236 @@
package postgres
import (
"context"
"encoding/json"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/internal/postgres"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
"github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres/gen"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgtype"
"github.com/samber/lo"
)
type Repository struct {
db postgres.DB
queries *gen.Queries
tx pgx.Tx
}
func NewRepository(db postgres.DB) *Repository {
return &Repository{
db: db,
queries: gen.New(db),
}
}
func (repo *Repository) CreateBlock(ctx context.Context, arg entity.Block) error {
err := repo.queries.CreateBlock(ctx, gen.CreateBlockParams{
BlockHeight: arg.BlockHeight,
BlockHash: arg.BlockHash,
Module: arg.Module,
})
if err != nil {
return errors.Wrap(err, "Cannot Add block")
}
return nil
}
func (repo *Repository) GetBlock(ctx context.Context, blockHeight int64) (*entity.Block, error) {
block, err := repo.queries.GetBlock(ctx, blockHeight)
if err != nil {
return nil, errors.Wrap(err, "Cannot get block")
}
return &entity.Block{
BlockHeight: block.BlockHeight,
BlockHash: block.BlockHash,
Module: block.Module,
}, nil
}
func (repo *Repository) GetLastProcessedBlock(ctx context.Context) (*entity.Block, error) {
block, err := repo.queries.GetLastProcessedBlock(ctx)
if err != nil {
return nil, errors.Wrap(err, "Cannot get last processed block")
}
return &entity.Block{
BlockHeight: block.BlockHeight,
BlockHash: block.BlockHash,
Module: block.Module,
}, nil
}
func (repo *Repository) RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error) {
affected, err := repo.queries.RemoveBlockFrom(ctx, fromBlock)
if err != nil {
return 0, errors.Wrap(err, "Cannot remove blocks")
}
return affected, nil
}
func (repo *Repository) RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error) {
affected, err := repo.queries.RemoveEventsFromBlock(ctx, fromBlock)
if err != nil {
return 0, errors.Wrap(err, "Cannot remove events")
}
return affected, nil
}
func (repo *Repository) ClearDelegate(ctx context.Context) (int64, error) {
affected, err := repo.queries.ClearDelegate(ctx)
if err != nil {
return 0, errors.Wrap(err, "Cannot clear delegate")
}
return affected, nil
}
func (repo *Repository) GetNodesByIds(ctx context.Context, arg datagateway.GetNodesByIdsParams) ([]entity.Node, error) {
nodes, err := repo.queries.GetNodesByIds(ctx, gen.GetNodesByIdsParams{
SaleBlock: int64(arg.SaleBlock),
SaleTxIndex: int32(arg.SaleTxIndex),
NodeIds: lo.Map(arg.NodeIds, func(item uint32, index int) int32 { return int32(item) }),
})
if err != nil {
return nil, errors.Wrap(err, "Cannot get nodes")
}
return mapNodes(nodes), nil
}
func (repo *Repository) CreateEvent(ctx context.Context, arg entity.NodeSaleEvent) error {
metaDataBytes := []byte("{}")
if arg.Metadata != nil {
metaDataBytes, _ = json.Marshal(arg.Metadata)
}
err := repo.queries.CreateEvent(ctx, gen.CreateEventParams{
TxHash: arg.TxHash,
BlockHeight: arg.BlockHeight,
TxIndex: arg.TxIndex,
WalletAddress: arg.WalletAddress,
Valid: arg.Valid,
Action: arg.Action,
RawMessage: arg.RawMessage,
ParsedMessage: arg.ParsedMessage,
BlockTimestamp: pgtype.Timestamp{Time: arg.BlockTimestamp.UTC(), Valid: true},
BlockHash: arg.BlockHash,
Metadata: metaDataBytes,
Reason: arg.Reason,
})
if err != nil {
return errors.Wrap(err, "Cannot add event")
}
return nil
}
func (repo *Repository) SetDelegates(ctx context.Context, arg datagateway.SetDelegatesParams) (int64, error) {
affected, err := repo.queries.SetDelegates(ctx, gen.SetDelegatesParams{
SaleBlock: int64(arg.SaleBlock),
SaleTxIndex: arg.SaleTxIndex,
Delegatee: arg.Delegatee,
DelegateTxHash: arg.DelegateTxHash,
NodeIds: lo.Map(arg.NodeIds, func(item uint32, index int) int32 { return int32(item) }),
})
if err != nil {
return 0, errors.Wrap(err, "Cannot set delegate")
}
return affected, nil
}
func (repo *Repository) CreateNodeSale(ctx context.Context, arg entity.NodeSale) error {
err := repo.queries.CreateNodeSale(ctx, gen.CreateNodeSaleParams{
BlockHeight: int64(arg.BlockHeight),
TxIndex: int32(arg.TxIndex),
Name: arg.Name,
StartsAt: pgtype.Timestamp{Time: arg.StartsAt.UTC(), Valid: true},
EndsAt: pgtype.Timestamp{Time: arg.EndsAt.UTC(), Valid: true},
Tiers: arg.Tiers,
SellerPublicKey: arg.SellerPublicKey,
MaxPerAddress: int32(arg.MaxPerAddress),
DeployTxHash: arg.DeployTxHash,
MaxDiscountPercentage: arg.MaxDiscountPercentage,
SellerWallet: arg.SellerWallet,
})
if err != nil {
return errors.Wrap(err, "Cannot add NodeSale")
}
return nil
}
func (repo *Repository) GetNodeSale(ctx context.Context, arg datagateway.GetNodeSaleParams) ([]entity.NodeSale, error) {
nodeSales, err := repo.queries.GetNodeSale(ctx, gen.GetNodeSaleParams{
BlockHeight: int64(arg.BlockHeight),
TxIndex: int32(arg.TxIndex),
})
if err != nil {
return nil, errors.Wrap(err, "Cannot get NodeSale")
}
return mapNodeSales(nodeSales), nil
}
func (repo *Repository) GetNodesByOwner(ctx context.Context, arg datagateway.GetNodesByOwnerParams) ([]entity.Node, error) {
nodes, err := repo.queries.GetNodesByOwner(ctx, gen.GetNodesByOwnerParams{
SaleBlock: int64(arg.SaleBlock),
SaleTxIndex: int32(arg.SaleTxIndex),
OwnerPublicKey: arg.OwnerPublicKey,
})
if err != nil {
return nil, errors.Wrap(err, "Cannot get nodes by owner")
}
return mapNodes(nodes), nil
}
func (repo *Repository) CreateNode(ctx context.Context, arg entity.Node) error {
err := repo.queries.CreateNode(ctx, gen.CreateNodeParams{
SaleBlock: int64(arg.SaleBlock),
SaleTxIndex: int32(arg.SaleTxIndex),
NodeID: int32(arg.NodeID),
TierIndex: arg.TierIndex,
DelegatedTo: arg.DelegatedTo,
OwnerPublicKey: arg.OwnerPublicKey,
PurchaseTxHash: arg.PurchaseTxHash,
DelegateTxHash: arg.DelegateTxHash,
})
if err != nil {
return errors.Wrap(err, "Cannot add node")
}
return nil
}
func (repo *Repository) GetNodeCountByTierIndex(ctx context.Context, arg datagateway.GetNodeCountByTierIndexParams) ([]datagateway.GetNodeCountByTierIndexRow, error) {
nodeCount, err := repo.queries.GetNodeCountByTierIndex(ctx, gen.GetNodeCountByTierIndexParams{
SaleBlock: int64(arg.SaleBlock),
SaleTxIndex: int32(arg.SaleTxIndex),
FromTier: int32(arg.FromTier),
ToTier: int32(arg.ToTier),
})
if err != nil {
return nil, errors.Wrap(err, "Cannot get node count by tier index")
}
return mapNodeCountByTierIndexRows(nodeCount), nil
}
func (repo *Repository) GetNodesByPubkey(ctx context.Context, arg datagateway.GetNodesByPubkeyParams) ([]entity.Node, error) {
nodes, err := repo.queries.GetNodesByPubkey(ctx, gen.GetNodesByPubkeyParams{
SaleBlock: arg.SaleBlock,
SaleTxIndex: arg.SaleTxIndex,
OwnerPublicKey: arg.OwnerPublicKey,
DelegatedTo: arg.DelegatedTo,
})
if err != nil {
return nil, errors.Wrap(err, "Cannot get nodes by public key")
}
return mapNodes(nodes), nil
}
func (repo *Repository) GetEventsByWallet(ctx context.Context, walletAddress string) ([]entity.NodeSaleEvent, error) {
events, err := repo.queries.GetEventsByWallet(ctx, walletAddress)
if err != nil {
return nil, errors.Wrap(err, "cannot get events by wallet")
}
return mapNodeSalesEvents(events), nil
}

View File

@@ -0,0 +1,62 @@
package postgres
import (
"context"
"github.com/cockroachdb/errors"
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
"github.com/gaze-network/indexer-network/pkg/logger"
"github.com/jackc/pgx/v5"
)
var ErrTxAlreadyExists = errors.New("Transaction already exists. Call Commit() or Rollback() first.")
func (r *Repository) begin(ctx context.Context) (*Repository, error) {
if r.tx != nil {
return nil, errors.WithStack(ErrTxAlreadyExists)
}
tx, err := r.db.Begin(ctx)
if err != nil {
return nil, errors.Wrap(err, "failed to begin transaction")
}
return &Repository{
db: r.db,
queries: r.queries.WithTx(tx),
tx: tx,
}, nil
}
func (r *Repository) BeginNodeSaleTx(ctx context.Context) (datagateway.NodeSaleDataGatewayWithTx, error) {
repo, err := r.begin(ctx)
if err != nil {
return nil, errors.WithStack(err)
}
return repo, nil
}
func (r *Repository) Commit(ctx context.Context) error {
if r.tx == nil {
return nil
}
err := r.tx.Commit(ctx)
if err != nil {
return errors.Wrap(err, "failed to commit transaction")
}
r.tx = nil
return nil
}
func (r *Repository) Rollback(ctx context.Context) error {
if r.tx == nil {
return nil
}
err := r.tx.Rollback(ctx)
if err != nil && !errors.Is(err, pgx.ErrTxClosed) {
return errors.Wrap(err, "failed to rollback transaction")
}
if err == nil {
logger.DebugContext(ctx, "rolled back transaction")
}
r.tx = nil
return nil
}
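
A hedged caller sketch, not part of the diff, of the transaction lifecycle above. It assumes the NodeSaleDataGatewayWithTx interface exposes Commit and Rollback, as the Repository methods suggest; the deferred Rollback is safe after a successful Commit because Commit clears r.tx.

// processBlock is a hypothetical helper in package postgres.
func processBlock(ctx context.Context, repo *Repository) error {
	dgTx, err := repo.BeginNodeSaleTx(ctx)
	if err != nil {
		return err
	}
	// No-op once Commit has cleared the underlying pgx.Tx.
	defer func() { _ = dgTx.Rollback(ctx) }()

	// ... create events / nodes through dgTx here ...

	return dgTx.Commit(ctx)
}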

View File

@@ -0,0 +1,25 @@
package nodesale
import "github.com/btcsuite/btcd/txscript"
func extractTapScript(witness [][]byte) (tokenizer txscript.ScriptTokenizer, controlBlock *txscript.ControlBlock, isTapScript bool) {
witness = removeAnnexFromWitness(witness)
if len(witness) < 2 {
return txscript.ScriptTokenizer{}, nil, false
}
script := witness[len(witness)-2]
rawControl := witness[len(witness)-1]
parsedControl, err := txscript.ParseControlBlock(rawControl)
if err != nil {
return txscript.ScriptTokenizer{}, nil, false
}
return txscript.MakeScriptTokenizer(0, script), parsedControl, true
}
func removeAnnexFromWitness(witness [][]byte) [][]byte {
if len(witness) >= 2 && len(witness[len(witness)-1]) > 0 && witness[len(witness)-1][0] == txscript.TaprootAnnexTag {
return witness[:len(witness)-1]
}
return witness
}
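
A hedged usage sketch, not part of the diff: given the witness stack of a taproot script-path spend (wtx is an assumed *wire.MsgTx), the returned tokenizer walks the tapscript opcode by opcode.

tokenizer, controlBlock, ok := extractTapScript(wtx.TxIn[0].Witness)
if ok {
	_ = controlBlock // carries the internal key and the merkle inclusion proof
	for tokenizer.Next() {
		op := tokenizer.Opcode()
		data := tokenizer.Data() // nil unless the opcode pushes data
		_, _ = op, data
	}
	if err := tokenizer.Err(); err != nil {
		// malformed tapscript
	}
}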

View File

@@ -72,6 +72,9 @@ func (h *HttpHandler) GetHolders(ctx *fiber.Ctx) (err error) {
runeEntry, err := h.usecase.GetRuneEntryByRuneIdAndHeight(ctx.UserContext(), runeId, blockHeight)
if err != nil {
if errors.Is(err, errs.NotFound) {
return errs.NewPublicError("rune not found")
}
return errors.Wrap(err, "error during GetHoldersByHeight")
}
holdingBalances, err := h.usecase.GetBalancesByRuneId(ctx.UserContext(), runeId, blockHeight)

View File

@@ -99,6 +99,9 @@ func (h *HttpHandler) GetTokenInfo(ctx *fiber.Ctx) (err error) {
runeEntry, err := h.usecase.GetRuneEntryByRuneIdAndHeight(ctx.UserContext(), runeId, blockHeight)
if err != nil {
if errors.Is(err, errs.NotFound) {
return errs.NewPublicError("rune not found")
}
return errors.Wrap(err, "error during GetTokenInfoByHeight")
}
holdingBalances, err := h.usecase.GetBalancesByRuneId(ctx.UserContext(), runeId, blockHeight)

View File

@@ -146,7 +146,7 @@ func (p *Processor) processTx(ctx context.Context, tx *types.Transaction, blockH
// find all non-OP_RETURN outputs
var destinations []int
for i, txOut := range tx.TxOut {
-if txOut.IsOpReturn() {
+if !txOut.IsOpReturn() {
destinations = append(destinations, i)
}
}
@@ -466,7 +466,7 @@ func (p *Processor) txCommitsToRune(ctx context.Context, tx *types.Transaction,
// It is impossible to verify that input utxo is a P2TR output with just the input.
// Need to verify with utxo's pk script.
-prevTx, err := p.bitcoinClient.GetTransactionByHash(ctx, txIn.PreviousOutTxHash)
+prevTx, blockHeight, err := p.bitcoinClient.GetRawTransactionAndHeightByTxHash(ctx, txIn.PreviousOutTxHash)
if err != nil && errors.Is(err, errs.NotFound) {
continue
}
@@ -479,7 +479,7 @@ func (p *Processor) txCommitsToRune(ctx context.Context, tx *types.Transaction,
break
}
// input must be mature enough
-confirmations := tx.BlockHeight - prevTx.BlockHeight + 1
+confirmations := tx.BlockHeight - blockHeight + 1
if confirmations < runes.RUNE_COMMIT_BLOCKS {
continue
}
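
A worked example of the adjusted maturity check, assuming runes.RUNE_COMMIT_BLOCKS is the usual six-block commit maturity:

// Commit input mined at height 840000, reveal transaction at height 840005:
confirmations := int64(840005) - int64(840000) + 1 // = 6
mature := confirmations >= 6                       // commitment accepted
_ = mature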

View File

@@ -4,9 +4,9 @@ import (
"context"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/gaze-network/indexer-network/core/types"
"github.com/btcsuite/btcd/wire"
)
type Contract interface {
-GetTransactionByHash(ctx context.Context, txHash chainhash.Hash) (*types.Transaction, error)
+GetRawTransactionAndHeightByTxHash(ctx context.Context, txHash chainhash.Hash) (*wire.MsgTx, int64, error)
}

View File

@@ -13,10 +13,13 @@ import (
)
type Config struct {
-WithRequestHeader bool `env:"REQUEST_HEADER" envDefault:"false" mapstructure:"request_header"`
-WithRequestQuery bool `env:"REQUEST_QUERY" envDefault:"false" mapstructure:"request_query"`
-Disable bool `env:"DISABLE" envDefault:"false" mapstructure:"disable"` // Disable logger level `INFO`
-HiddenRequestHeaders []string `env:"HIDDEN_REQUEST_HEADERS" mapstructure:"hidden_request_headers"`
+AllRequestHeaders bool `env:"REQUEST_HEADER" envDefault:"false" mapstructure:"request_header"` // Log all request headers
+AllResponseHeaders bool `env:"RESPONSE_HEADER" envDefault:"false" mapstructure:"response_header"` // Log all response headers
+AllRequestQueries bool `env:"REQUEST_QUERY" envDefault:"false" mapstructure:"request_query"` // Log all request queries
+Disable bool `env:"DISABLE" envDefault:"false" mapstructure:"disable"` // Disable logger level `INFO`
+HiddenRequestHeaders []string `env:"HIDDEN_REQUEST_HEADERS" mapstructure:"hidden_request_headers"` // Hide specific headers from log
+WithRequestHeaders []string `env:"WITH_REQUEST_HEADERS" mapstructure:"with_request_headers"` // Add specific headers to log (higher priority than `HiddenRequestHeaders`)
+With map[string]interface{} `env:"WITH" mapstructure:"with"` // Additional fields to log
}
// New setup request context and information
@@ -25,6 +28,10 @@ func New(config Config) fiber.Handler {
for _, header := range config.HiddenRequestHeaders {
hiddenRequestHeaders[strings.TrimSpace(strings.ToLower(header))] = struct{}{}
}
withRequestHeaders := make(map[string]struct{}, len(config.WithRequestHeaders))
for _, header := range config.WithRequestHeaders {
withRequestHeaders[strings.TrimSpace(strings.ToLower(header))] = struct{}{}
}
return func(c *fiber.Ctx) error {
start := time.Now()
@@ -41,6 +48,11 @@ func New(config Config) fiber.Handler {
slog.String("latencyHuman", latency.String()),
}
// add `with` fields
for k, v := range config.With {
baseAttrs = append(baseAttrs, slog.Any(k, v))
}
// prep request attributes
requestAttributes := []slog.Attr{
slog.Time("time", start),
@@ -64,23 +76,64 @@ func New(config Config) fiber.Handler {
slog.Int("length", len(c.Response().Body())),
}
-// request query
-if config.WithRequestQuery {
-requestAttributes = append(requestAttributes, slog.String("query", string(c.Request().URI().QueryString())))
+// request queries
+if config.AllRequestQueries {
+args := c.Request().URI().QueryArgs()
+logAttrs := make([]any, 0, args.Len())
+args.VisitAll(func(k, v []byte) {
+logAttrs = append(logAttrs, slog.Any(string(k), string(v)))
+})
+requestAttributes = append(requestAttributes, slog.Group("queries", logAttrs...))
}
// request headers
-if config.WithRequestHeader {
+if config.AllRequestHeaders || len(config.WithRequestHeaders) > 0 {
kv := []any{}
for k, v := range c.GetReqHeaders() {
h := strings.ToLower(k)
// add headers for WithRequestHeaders
if _, found := withRequestHeaders[h]; found {
goto add
}
// skip hidden headers
if _, found := hiddenRequestHeaders[h]; found {
continue
}
// skip if not AllRequestHeaders
if !config.AllRequestHeaders {
continue
}
add:
val := any(v)
if len(v) == 1 {
val = v[0]
}
kv = append(kv, slog.Any(k, val))
}
requestAttributes = append(requestAttributes, slog.Group("headers", kv...))
}
if config.AllResponseHeaders {
kv := []any{}
for k, v := range c.GetRespHeaders() {
// skip hidden headers
if _, found := hiddenRequestHeaders[strings.ToLower(k)]; found {
continue
}
val := any(v)
if len(v) == 1 {
val = v[0]
}
kv = append(kv, slog.Any(k, val))
}
responseAttributes = append(responseAttributes, slog.Group("headers", kv...))
}
level := slog.LevelInfo

View File

@@ -17,3 +17,11 @@ sql:
sql_package: "pgx/v5"
rename:
id: "Id"
- schema: "./modules/nodesale/database/postgresql/migrations"
queries: "./modules/nodesale/database/postgresql/queries"
engine: "postgresql"
gen:
go:
package: "gen"
out: "./modules/nodesale/repository/postgres/gen"
sql_package: "pgx/v5"
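
Assuming the standard sqlc workflow, running sqlc generate from the repository root with this entry in place should reproduce the gen package shown above (the generated file headers indicate sqlc v1.26.0).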