From db5dc75c419b150f493faf474b393394a4f582b5 Mon Sep 17 00:00:00 2001 From: waiemwor <42979457+waiemwor@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:33:20 +0700 Subject: [PATCH] Feature/nodesale (#40) * feat: recover nodesale module. * fix: refactored. * fix: fix table type. * fix: add entity * fix: bug UTC time. * ci: try to tidy before testing * ci: touch result file * ci: use echo to create new file * fix: try to skip test in ci * fix: remove os.Exit * fix: handle error * feat: add todo note * fix: Cannot run nodesale test because qtx is not initiated. * fix: 50% chance public key compare incorrectly. * fix: more consistent SQL * fix: sanity refactor. * fix: remove unused code. * fix: move last_block_default to config file. * fix: minor mistakes. * fix: * fix: refactor * fix: refactor * fix: delegate tx hash not record into db. * refactor: prepare for moving integration tests. * refactor: convert to unit tests. * fix: change to using input values since output values deducted fee. * feat: add extra unit test. * fix: wrong timestamp format. * fix: handle block timeout = 0 --------- Co-authored-by: Gaze --- .github/workflows/code-analysis.yml | 3 + cmd/cmd_run.go | 2 + cmd/cmd_version.go | 6 +- config.example.yaml | 8 + go.mod | 7 +- go.sum | 4 + internal/config/config.go | 4 +- modules/nodesale/api/httphandler/deploy.go | 99 ++ modules/nodesale/api/httphandler/events.go | 56 + modules/nodesale/api/httphandler/handler.go | 15 + modules/nodesale/api/httphandler/info.go | 26 + modules/nodesale/api/httphandler/nodes.go | 72 ++ modules/nodesale/api/httphandler/routes.go | 16 + modules/nodesale/config/config.go | 8 + .../migrations/000001_initialize.down.sql | 9 + .../migrations/000001_initialize.up.sql | 64 + .../database/postgresql/queries/blocks.sql | 15 + .../database/postgresql/queries/events.sql | 14 + .../database/postgresql/queries/nodes.sql | 51 + .../database/postgresql/queries/nodesales.sql | 9 + .../database/postgresql/queries/test.sql | 3 + .../mocks/NodeSaleDataGatewayWithTx.go | 1075 +++++++++++++++++ modules/nodesale/datagateway/nodesale.go | 76 ++ modules/nodesale/datagateway/tx.go | 12 + modules/nodesale/delegate.go | 61 + modules/nodesale/delegate_test.go | 84 ++ modules/nodesale/deploy.go | 67 + modules/nodesale/deploy_test.go | 139 +++ modules/nodesale/internal/entity/entity.go | 55 + .../internal/validator/delegate/validator.go | 51 + modules/nodesale/internal/validator/errors.go | 6 + .../internal/validator/purchase/errors.go | 17 + .../internal/validator/purchase/validator.go | 283 +++++ .../nodesale/internal/validator/validator.go | 44 + modules/nodesale/nodesale.go | 61 + modules/nodesale/nodesale_test.go | 61 + modules/nodesale/processor.go | 303 +++++ modules/nodesale/protobuf/nodesale.pb.go | 806 ++++++++++++ modules/nodesale/protobuf/nodesale.proto | 60 + modules/nodesale/pubkeyaddr.go | 12 + modules/nodesale/purchase.go | 87 ++ modules/nodesale/purchase_test.go | 902 ++++++++++++++ .../repository/postgres/gen/blocks.sql.go | 62 + .../nodesale/repository/postgres/gen/db.go | 32 + .../repository/postgres/gen/events.sql.go | 104 ++ .../repository/postgres/gen/models.go | 55 + .../repository/postgres/gen/nodes.sql.go | 271 +++++ .../repository/postgres/gen/nodesales.sql.go | 92 ++ .../repository/postgres/gen/test.sql.go | 20 + .../nodesale/repository/postgres/mapper.go | 74 ++ .../repository/postgres/repository.go | 236 ++++ modules/nodesale/repository/postgres/tx.go | 62 + modules/nodesale/tapscript.go | 25 + sqlc.yaml | 8 + 54 files changed, 5789 
insertions(+), 5 deletions(-) create mode 100644 modules/nodesale/api/httphandler/deploy.go create mode 100644 modules/nodesale/api/httphandler/events.go create mode 100644 modules/nodesale/api/httphandler/handler.go create mode 100644 modules/nodesale/api/httphandler/info.go create mode 100644 modules/nodesale/api/httphandler/nodes.go create mode 100644 modules/nodesale/api/httphandler/routes.go create mode 100644 modules/nodesale/config/config.go create mode 100644 modules/nodesale/database/postgresql/migrations/000001_initialize.down.sql create mode 100644 modules/nodesale/database/postgresql/migrations/000001_initialize.up.sql create mode 100644 modules/nodesale/database/postgresql/queries/blocks.sql create mode 100644 modules/nodesale/database/postgresql/queries/events.sql create mode 100644 modules/nodesale/database/postgresql/queries/nodes.sql create mode 100644 modules/nodesale/database/postgresql/queries/nodesales.sql create mode 100644 modules/nodesale/database/postgresql/queries/test.sql create mode 100644 modules/nodesale/datagateway/mocks/NodeSaleDataGatewayWithTx.go create mode 100644 modules/nodesale/datagateway/nodesale.go create mode 100644 modules/nodesale/datagateway/tx.go create mode 100644 modules/nodesale/delegate.go create mode 100644 modules/nodesale/delegate_test.go create mode 100644 modules/nodesale/deploy.go create mode 100644 modules/nodesale/deploy_test.go create mode 100644 modules/nodesale/internal/entity/entity.go create mode 100644 modules/nodesale/internal/validator/delegate/validator.go create mode 100644 modules/nodesale/internal/validator/errors.go create mode 100644 modules/nodesale/internal/validator/purchase/errors.go create mode 100644 modules/nodesale/internal/validator/purchase/validator.go create mode 100644 modules/nodesale/internal/validator/validator.go create mode 100644 modules/nodesale/nodesale.go create mode 100644 modules/nodesale/nodesale_test.go create mode 100644 modules/nodesale/processor.go create mode 100644 modules/nodesale/protobuf/nodesale.pb.go create mode 100644 modules/nodesale/protobuf/nodesale.proto create mode 100644 modules/nodesale/pubkeyaddr.go create mode 100644 modules/nodesale/purchase.go create mode 100644 modules/nodesale/purchase_test.go create mode 100644 modules/nodesale/repository/postgres/gen/blocks.sql.go create mode 100644 modules/nodesale/repository/postgres/gen/db.go create mode 100644 modules/nodesale/repository/postgres/gen/events.sql.go create mode 100644 modules/nodesale/repository/postgres/gen/models.go create mode 100644 modules/nodesale/repository/postgres/gen/nodes.sql.go create mode 100644 modules/nodesale/repository/postgres/gen/nodesales.sql.go create mode 100644 modules/nodesale/repository/postgres/gen/test.sql.go create mode 100644 modules/nodesale/repository/postgres/mapper.go create mode 100644 modules/nodesale/repository/postgres/repository.go create mode 100644 modules/nodesale/repository/postgres/tx.go create mode 100644 modules/nodesale/tapscript.go diff --git a/.github/workflows/code-analysis.yml b/.github/workflows/code-analysis.yml index ccd15eb..cfbb4bb 100644 --- a/.github/workflows/code-analysis.yml +++ b/.github/workflows/code-analysis.yml @@ -58,6 +58,9 @@ jobs: cache: true # caching and restoring go modules and build outputs. - run: echo "GOVERSION=$(go version)" >> $GITHUB_ENV + - name: Touch test result file + run: echo "" > test_output.json + - name: Build run: go build -v ./... 
diff --git a/cmd/cmd_run.go b/cmd/cmd_run.go index ef1025c..61077eb 100644 --- a/cmd/cmd_run.go +++ b/cmd/cmd_run.go @@ -17,6 +17,7 @@ import ( "github.com/gaze-network/indexer-network/common/errs" "github.com/gaze-network/indexer-network/core/indexer" "github.com/gaze-network/indexer-network/internal/config" + "github.com/gaze-network/indexer-network/modules/nodesale" "github.com/gaze-network/indexer-network/modules/runes" "github.com/gaze-network/indexer-network/pkg/automaxprocs" "github.com/gaze-network/indexer-network/pkg/logger" @@ -39,6 +40,7 @@ import ( // Register Modules var Modules = do.Package( do.LazyNamed("runes", runes.New), + do.LazyNamed("nodesale", nodesale.New), ) func NewRunCommand() *cobra.Command { diff --git a/cmd/cmd_version.go b/cmd/cmd_version.go index b8b1558..308bed2 100644 --- a/cmd/cmd_version.go +++ b/cmd/cmd_version.go @@ -6,13 +6,15 @@ import ( "github.com/cockroachdb/errors" "github.com/gaze-network/indexer-network/common/errs" "github.com/gaze-network/indexer-network/core/constants" + "github.com/gaze-network/indexer-network/modules/nodesale" "github.com/gaze-network/indexer-network/modules/runes" "github.com/spf13/cobra" ) var versions = map[string]string{ - "": constants.Version, - "runes": runes.Version, + "": constants.Version, + "runes": runes.Version, + "nodesale": nodesale.Version, } type versionCmdOptions struct { diff --git a/config.example.yaml b/config.example.yaml index a910397..92b7090 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -47,3 +47,11 @@ modules: password: "password" db_name: "postgres" # url: "postgres://postgres:password@localhost:5432/postgres?sslmode=prefer" # [Optional] This will override other database credentials above. + nodesale: + postgres: + host: "localhost" + port: 5432 + user: "postgres" + password: "P@ssw0rd" + db_name: "postgres" + last_block_default: 400 \ No newline at end of file diff --git a/go.mod b/go.mod index cf8cf88..a86afdb 100644 --- a/go.mod +++ b/go.mod @@ -25,12 +25,15 @@ require ( github.com/valyala/fasthttp v1.51.0 go.uber.org/automaxprocs v1.5.3 golang.org/x/sync v0.7.0 + google.golang.org/protobuf v1.33.0 ) +require github.com/stretchr/objx v0.5.2 // indirect + require ( github.com/andybalholm/brotli v1.0.5 // indirect github.com/bitonicnl/verify-signed-message v0.7.1 - github.com/btcsuite/btcd/btcec/v2 v2.3.3 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.3 github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd // indirect github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792 // indirect @@ -38,7 +41,7 @@ require ( github.com/cockroachdb/redact v1.1.5 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/decred/dcrd/crypto/blake256 v1.0.1 // indirect - github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 // indirect + github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/getsentry/sentry-go v0.18.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect diff --git a/go.sum b/go.sum index c5d7abb..c25ef41 100644 --- a/go.sum +++ b/go.sum @@ -221,6 +221,8 @@ github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMV github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= 
+github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -310,6 +312,8 @@ google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQ google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= diff --git a/internal/config/config.go b/internal/config/config.go index 367c679..6ea0d61 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -8,6 +8,7 @@ import ( "github.com/cockroachdb/errors" "github.com/gaze-network/indexer-network/common" + nodesaleconfig "github.com/gaze-network/indexer-network/modules/nodesale/config" runesconfig "github.com/gaze-network/indexer-network/modules/runes/config" "github.com/gaze-network/indexer-network/pkg/logger" "github.com/gaze-network/indexer-network/pkg/logger/slogx" @@ -61,7 +62,8 @@ type BitcoinNodeClient struct { } type Modules struct { - Runes runesconfig.Config `mapstructure:"runes"` + Runes runesconfig.Config `mapstructure:"runes"` + NodeSale nodesaleconfig.Config `mapstructure:"nodesale"` } type HTTPServerConfig struct { diff --git a/modules/nodesale/api/httphandler/deploy.go b/modules/nodesale/api/httphandler/deploy.go new file mode 100644 index 0000000..920f196 --- /dev/null +++ b/modules/nodesale/api/httphandler/deploy.go @@ -0,0 +1,99 @@ +package httphandler + +import ( + "fmt" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/common/errs" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "github.com/gofiber/fiber/v2" + "google.golang.org/protobuf/encoding/protojson" +) + +type deployRequest struct { + DeployID string `params:"deployId"` +} + +type tierResponse struct { + PriceSat uint32 `json:"priceSat"` + Limit uint32 `json:"limit"` + MaxPerAddress uint32 `json:"maxPerAddress"` + Sold int64 `json:"sold"` +} + +type deployResponse struct { + Id string `json:"id"` + Name string `json:"name"` + StartsAt int64 `json:"startsAt"` + EndsAt int64 `json:"endsAt"` + Tiers []tierResponse `json:"tiers"` + SellerPublicKey string `json:"sellerPublicKey"` + MaxPerAddress uint32 `json:"maxPerAddress"` + DeployTxHash string `json:"deployTxHash"` +} + +func (h *handler) deployHandler(ctx *fiber.Ctx) error { + var request deployRequest + err := ctx.ParamsParser(&request) + if err != nil { + return errors.Wrap(err, "cannot parse param") + } + var blockHeight uint64 + var txIndex uint32 + count, err := 
fmt.Sscanf(request.DeployID, "%d-%d", &blockHeight, &txIndex) + if count != 2 || err != nil { + return errs.NewPublicError("Invalid deploy ID") + } + deploys, err := h.nodeSaleDg.GetNodeSale(ctx.UserContext(), datagateway.GetNodeSaleParams{ + BlockHeight: blockHeight, + TxIndex: txIndex, + }) + if err != nil { + return errors.Wrap(err, "Cannot get NodeSale from db") + } + if len(deploys) < 1 { + return errs.NewPublicError("NodeSale not found") + } + + deploy := deploys[0] + + nodeCount, err := h.nodeSaleDg.GetNodeCountByTierIndex(ctx.UserContext(), datagateway.GetNodeCountByTierIndexParams{ + SaleBlock: deploy.BlockHeight, + SaleTxIndex: deploy.TxIndex, + FromTier: 0, + ToTier: uint32(len(deploy.Tiers) - 1), + }) + if err != nil { + return errors.Wrap(err, "Cannot get node count from db") + } + + tiers := make([]protobuf.Tier, len(deploy.Tiers)) + tierResponses := make([]tierResponse, len(deploy.Tiers)) + for i, tierJson := range deploy.Tiers { + tier := &tiers[i] + err := protojson.Unmarshal(tierJson, tier) + if err != nil { + return errors.Wrap(err, "Failed to decode tiers json") + } + tierResponses[i].Limit = tiers[i].Limit + tierResponses[i].MaxPerAddress = tiers[i].MaxPerAddress + tierResponses[i].PriceSat = tiers[i].PriceSat + tierResponses[i].Sold = nodeCount[i].Count + } + + err = ctx.JSON(&deployResponse{ + Id: request.DeployID, + Name: deploy.Name, + StartsAt: deploy.StartsAt.UTC().Unix(), + EndsAt: deploy.EndsAt.UTC().Unix(), + Tiers: tierResponses, + SellerPublicKey: deploy.SellerPublicKey, + MaxPerAddress: deploy.MaxPerAddress, + DeployTxHash: deploy.DeployTxHash, + }) + if err != nil { + return errors.Wrap(err, "Go fiber cannot parse JSON") + } + return nil +} diff --git a/modules/nodesale/api/httphandler/events.go b/modules/nodesale/api/httphandler/events.go new file mode 100644 index 0000000..ff3c1e6 --- /dev/null +++ b/modules/nodesale/api/httphandler/events.go @@ -0,0 +1,56 @@ +package httphandler + +import ( + "encoding/json" + "time" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "github.com/gofiber/fiber/v2" +) + +type eventRequest struct { + WalletAddress string `query:"walletAddress"` +} + +type eventResponse struct { + TxHash string `json:"txHash"` + BlockHeight int64 `json:"blockHeight"` + TxIndex int32 `json:"txIndex"` + WalletAddress string `json:"walletAddress"` + Action string `json:"action"` + ParsedMessage json.RawMessage `json:"parsedMessage"` + BlockTimestamp time.Time `json:"blockTimestamp"` + BlockHash string `json:"blockHash"` +} + +func (h *handler) eventsHandler(ctx *fiber.Ctx) error { + var request eventRequest + err := ctx.QueryParser(&request) + if err != nil { + return errors.Wrap(err, "cannot parse query") + } + + events, err := h.nodeSaleDg.GetEventsByWallet(ctx.UserContext(), request.WalletAddress) + if err != nil { + return errors.Wrap(err, "Can't get events from db") + } + + responses := make([]eventResponse, len(events)) + for i, event := range events { + responses[i].TxHash = event.TxHash + responses[i].BlockHeight = event.BlockHeight + responses[i].TxIndex = event.TxIndex + responses[i].WalletAddress = event.WalletAddress + responses[i].Action = protobuf.Action_name[event.Action] + responses[i].ParsedMessage = event.ParsedMessage + responses[i].BlockTimestamp = event.BlockTimestamp + responses[i].BlockHash = event.BlockHash + } + + err = ctx.JSON(responses) + if err != nil { + return errors.Wrap(err, "Go fiber cannot parse JSON") + } + return nil +} diff --git
a/modules/nodesale/api/httphandler/handler.go b/modules/nodesale/api/httphandler/handler.go new file mode 100644 index 0000000..a072d36 --- /dev/null +++ b/modules/nodesale/api/httphandler/handler.go @@ -0,0 +1,15 @@ +package httphandler + +import ( + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" +) + +type handler struct { + nodeSaleDg datagateway.NodeSaleDataGateway +} + +func New(datagateway datagateway.NodeSaleDataGateway) *handler { + h := handler{} + h.nodeSaleDg = datagateway + return &h +} diff --git a/modules/nodesale/api/httphandler/info.go b/modules/nodesale/api/httphandler/info.go new file mode 100644 index 0000000..4bf51b6 --- /dev/null +++ b/modules/nodesale/api/httphandler/info.go @@ -0,0 +1,26 @@ +package httphandler + +import ( + "github.com/cockroachdb/errors" + "github.com/gofiber/fiber/v2" +) + +type infoResponse struct { + IndexedBlockHeight int64 `json:"indexedBlockHeight"` + IndexedBlockHash string `json:"indexedBlockHash"` +} + +func (h *handler) infoHandler(ctx *fiber.Ctx) error { + block, err := h.nodeSaleDg.GetLastProcessedBlock(ctx.UserContext()) + if err != nil { + return errors.Wrap(err, "Cannot get last processed block") + } + err = ctx.JSON(infoResponse{ + IndexedBlockHeight: block.BlockHeight, + IndexedBlockHash: block.BlockHash, + }) + if err != nil { + return errors.Wrap(err, "Go fiber cannot parse JSON") + } + return nil +} diff --git a/modules/nodesale/api/httphandler/nodes.go b/modules/nodesale/api/httphandler/nodes.go new file mode 100644 index 0000000..209d8cd --- /dev/null +++ b/modules/nodesale/api/httphandler/nodes.go @@ -0,0 +1,72 @@ +package httphandler + +import ( + "fmt" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/common/errs" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gofiber/fiber/v2" +) + +type nodeRequest struct { + DeployId string `query:"deployId"` + OwnerPublicKey string `query:"ownerPublicKey"` + DelegateePublicKey string `query:"delegateePublicKey"` +} + +type nodeResponse struct { + DeployId string `json:"deployId"` + NodeId uint32 `json:"nodeId"` + TierIndex int32 `json:"tierIndex"` + DelegatedTo string `json:"delegatedTo"` + OwnerPublicKey string `json:"ownerPublicKey"` + PurchaseTxHash string `json:"purchaseTxHash"` + DelegateTxHash string `json:"delegateTxHash"` + PurchaseBlockHeight int32 `json:"purchaseBlockHeight"` +} + +func (h *handler) nodesHandler(ctx *fiber.Ctx) error { + var request nodeRequest + err := ctx.QueryParser(&request) + if err != nil { + return errors.Wrap(err, "cannot parse query") + } + + ownerPublicKey := request.OwnerPublicKey + delegateePublicKey := request.DelegateePublicKey + + var blockHeight int64 + var txIndex int32 + count, err := fmt.Sscanf(request.DeployId, "%d-%d", &blockHeight, &txIndex) + if count != 2 || err != nil { + return errs.NewPublicError("Invalid deploy ID") + } + + nodes, err := h.nodeSaleDg.GetNodesByPubkey(ctx.UserContext(), datagateway.GetNodesByPubkeyParams{ + SaleBlock: blockHeight, + SaleTxIndex: txIndex, + OwnerPublicKey: ownerPublicKey, + DelegatedTo: delegateePublicKey, + }) + if err != nil { + return errors.Wrap(err, "Can't get nodes from db") + } + responses := make([]nodeResponse, len(nodes)) + for i, node := range nodes { + responses[i].DeployId = request.DeployId + responses[i].NodeId = node.NodeID + responses[i].TierIndex = node.TierIndex + responses[i].DelegatedTo = node.DelegatedTo + responses[i].OwnerPublicKey = node.OwnerPublicKey + responses[i].PurchaseTxHash = 
node.PurchaseTxHash + responses[i].DelegateTxHash = node.DelegateTxHash + responses[i].PurchaseBlockHeight = txIndex + } + + err = ctx.JSON(responses) + if err != nil { + return errors.Wrap(err, "Go fiber cannot parse JSON") + } + return nil +} diff --git a/modules/nodesale/api/httphandler/routes.go b/modules/nodesale/api/httphandler/routes.go new file mode 100644 index 0000000..2eceade --- /dev/null +++ b/modules/nodesale/api/httphandler/routes.go @@ -0,0 +1,16 @@ +package httphandler + +import ( + "github.com/gofiber/fiber/v2" +) + +func (h *handler) Mount(router fiber.Router) error { + r := router.Group("/nodesale/v1") + + r.Get("/info", h.infoHandler) + r.Get("/deploy/:deployId", h.deployHandler) + r.Get("/nodes", h.nodesHandler) + r.Get("/events", h.eventsHandler) + + return nil +} diff --git a/modules/nodesale/config/config.go b/modules/nodesale/config/config.go new file mode 100644 index 0000000..7d7b471 --- /dev/null +++ b/modules/nodesale/config/config.go @@ -0,0 +1,8 @@ +package config + +import "github.com/gaze-network/indexer-network/internal/postgres" + +type Config struct { + Postgres postgres.Config `mapstructure:"postgres"` + LastBlockDefault int64 `mapstructure:"last_block_default"` +} diff --git a/modules/nodesale/database/postgresql/migrations/000001_initialize.down.sql b/modules/nodesale/database/postgresql/migrations/000001_initialize.down.sql new file mode 100644 index 0000000..a1c08e3 --- /dev/null +++ b/modules/nodesale/database/postgresql/migrations/000001_initialize.down.sql @@ -0,0 +1,9 @@ +BEGIN; + +DROP TABLE IF EXISTS nodes; +DROP TABLE IF EXISTS node_sales; +DROP TABLE IF EXISTS events; +DROP TABLE IF EXISTS blocks; + + +COMMIT; \ No newline at end of file diff --git a/modules/nodesale/database/postgresql/migrations/000001_initialize.up.sql b/modules/nodesale/database/postgresql/migrations/000001_initialize.up.sql new file mode 100644 index 0000000..18b0735 --- /dev/null +++ b/modules/nodesale/database/postgresql/migrations/000001_initialize.up.sql @@ -0,0 +1,64 @@ +BEGIN; + +CREATE TABLE IF NOT EXISTS blocks ( + "block_height" BIGINT NOT NULL, + "block_hash" TEXT NOT NULL, + "module" TEXT NOT NULL, + PRIMARY KEY("block_height", "block_hash") +); + +CREATE TABLE IF NOT EXISTS events ( + "tx_hash" TEXT NOT NULL PRIMARY KEY, + "block_height" BIGINT NOT NULL, + "tx_index" INTEGER NOT NULL, + "wallet_address" TEXT NOT NULL, + "valid" BOOLEAN NOT NULL, + "action" INTEGER NOT NULL, + "raw_message" BYTEA NOT NULL, + "parsed_message" JSONB NOT NULL DEFAULT '{}', + "block_timestamp" TIMESTAMP NOT NULL, + "block_hash" TEXT NOT NULL, + "metadata" JSONB NOT NULL DEFAULT '{}', + "reason" TEXT NOT NULL DEFAULT '' +); + +INSERT INTO events("tx_hash", "block_height", "tx_index", + "wallet_address", "valid", "action", + "raw_message", "parsed_message", "block_timestamp", + "block_hash", "metadata") +VALUES ('', -1, -1, + '', false, -1, + '', '{}', NOW(), + '', '{}'); + +CREATE TABLE IF NOT EXISTS node_sales ( + "block_height" BIGINT NOT NULL, + "tx_index" INTEGER NOT NULL, + "name" TEXT NOT NULL, + "starts_at" TIMESTAMP NOT NULL, + "ends_at" TIMESTAMP NOT NULL, + "tiers" JSONB[] NOT NULL, + "seller_public_key" TEXT NOT NULL, + "max_per_address" INTEGER NOT NULL, + "deploy_tx_hash" TEXT NOT NULL REFERENCES events(tx_hash) ON DELETE CASCADE, + "max_discount_percentage" INTEGER NOT NULL, + "seller_wallet" TEXT NOT NULL, + PRIMARY KEY ("block_height", "tx_index") +); + +CREATE TABLE IF NOT EXISTS nodes ( + "sale_block" BIGINT NOT NULL, + "sale_tx_index" INTEGER NOT NULL, + 
"node_id" INTEGER NOT NULL, + "tier_index" INTEGER NOT NULL, + "delegated_to" TEXT NOT NULL DEFAULT '', + "owner_public_key" TEXT NOT NULL, + "purchase_tx_hash" TEXT NOT NULL REFERENCES events(tx_hash) ON DELETE CASCADE, + "delegate_tx_hash" TEXT NOT NULL DEFAULT '' REFERENCES events(tx_hash) ON DELETE SET DEFAULT, + PRIMARY KEY("sale_block", "sale_tx_index", "node_id"), + FOREIGN KEY("sale_block", "sale_tx_index") REFERENCES node_sales("block_height", "tx_index") +); + + + +COMMIT; \ No newline at end of file diff --git a/modules/nodesale/database/postgresql/queries/blocks.sql b/modules/nodesale/database/postgresql/queries/blocks.sql new file mode 100644 index 0000000..a4b4a1b --- /dev/null +++ b/modules/nodesale/database/postgresql/queries/blocks.sql @@ -0,0 +1,15 @@ +-- name: GetLastProcessedBlock :one +SELECT * FROM blocks ORDER BY block_height DESC LIMIT 1; + + +-- name: GetBlock :one +SELECT * FROM blocks +WHERE "block_height" = $1; + +-- name: RemoveBlockFrom :execrows +DELETE FROM blocks +WHERE "block_height" >= @from_block; + +-- name: CreateBlock :exec +INSERT INTO blocks ("block_height", "block_hash", "module") +VALUES ($1, $2, $3); \ No newline at end of file diff --git a/modules/nodesale/database/postgresql/queries/events.sql b/modules/nodesale/database/postgresql/queries/events.sql new file mode 100644 index 0000000..229ea57 --- /dev/null +++ b/modules/nodesale/database/postgresql/queries/events.sql @@ -0,0 +1,14 @@ +-- name: RemoveEventsFromBlock :execrows +DELETE FROM events +WHERE "block_height" >= @from_block; + +-- name: CreateEvent :exec +INSERT INTO events ("tx_hash", "block_height", "tx_index", "wallet_address", "valid", "action", + "raw_message", "parsed_message", "block_timestamp", "block_hash", "metadata", + "reason") +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12); + +-- name: GetEventsByWallet :many +SELECT * +FROM events +WHERE wallet_address = $1; \ No newline at end of file diff --git a/modules/nodesale/database/postgresql/queries/nodes.sql b/modules/nodesale/database/postgresql/queries/nodes.sql new file mode 100644 index 0000000..bbd1e7f --- /dev/null +++ b/modules/nodesale/database/postgresql/queries/nodes.sql @@ -0,0 +1,51 @@ +-- name: ClearDelegate :execrows +UPDATE nodes +SET "delegated_to" = '' +WHERE "delegate_tx_hash" = ''; + +-- name: SetDelegates :execrows +UPDATE nodes +SET delegated_to = @delegatee, delegate_tx_hash = $3 +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + node_id = ANY (@node_ids::int[]); + +-- name: GetNodesByIds :many +SELECT * +FROM nodes +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + node_id = ANY (@node_ids::int[]); + + +-- name: GetNodesByOwner :many +SELECT * +FROM nodes +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + owner_public_key = $3 +ORDER BY tier_index; + +-- name: GetNodesByPubkey :many +SELECT nodes.* +FROM nodes JOIN events ON nodes.purchase_tx_hash = events.tx_hash +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + owner_public_key = $3 AND + delegated_to = $4; + +-- name: CreateNode :exec +INSERT INTO nodes (sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8); + +-- name: GetNodeCountByTierIndex :many +SELECT (tiers.tier_index)::int AS tier_index, count(nodes.tier_index) +FROM generate_series(@from_tier::int,@to_tier::int) AS tiers(tier_index) +LEFT JOIN + (SELECT * + FROM nodes + WHERE sale_block = $1 AND + sale_tx_index= $2) + AS nodes ON tiers.tier_index = nodes.tier_index 
+GROUP BY tiers.tier_index +ORDER BY tiers.tier_index; \ No newline at end of file diff --git a/modules/nodesale/database/postgresql/queries/nodesales.sql b/modules/nodesale/database/postgresql/queries/nodesales.sql new file mode 100644 index 0000000..1963eab --- /dev/null +++ b/modules/nodesale/database/postgresql/queries/nodesales.sql @@ -0,0 +1,9 @@ +-- name: CreateNodeSale :exec +INSERT INTO node_sales ("block_height", "tx_index", "name", "starts_at", "ends_at", "tiers", "seller_public_key", "max_per_address", "deploy_tx_hash", "max_discount_percentage", "seller_wallet") +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11); + +-- name: GetNodeSale :many +SELECT * +FROM node_sales +WHERE block_height = $1 AND + tx_index = $2; \ No newline at end of file diff --git a/modules/nodesale/database/postgresql/queries/test.sql b/modules/nodesale/database/postgresql/queries/test.sql new file mode 100644 index 0000000..c1892e7 --- /dev/null +++ b/modules/nodesale/database/postgresql/queries/test.sql @@ -0,0 +1,3 @@ +-- name: ClearEvents :exec +DELETE FROM events +WHERE tx_hash <> ''; diff --git a/modules/nodesale/datagateway/mocks/NodeSaleDataGatewayWithTx.go b/modules/nodesale/datagateway/mocks/NodeSaleDataGatewayWithTx.go new file mode 100644 index 0000000..b7de213 --- /dev/null +++ b/modules/nodesale/datagateway/mocks/NodeSaleDataGatewayWithTx.go @@ -0,0 +1,1075 @@ +// Code generated by mockery v2.43.2. DO NOT EDIT. + +package mocks + +import ( + context "context" + + datagateway "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + entity "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + + mock "github.com/stretchr/testify/mock" +) + +// NodeSaleDataGatewayWithTx is an autogenerated mock type for the NodeSaleDataGatewayWithTx type +type NodeSaleDataGatewayWithTx struct { + mock.Mock +} + +type NodeSaleDataGatewayWithTx_Expecter struct { + mock *mock.Mock +} + +func (_m *NodeSaleDataGatewayWithTx) EXPECT() *NodeSaleDataGatewayWithTx_Expecter { + return &NodeSaleDataGatewayWithTx_Expecter{mock: &_m.Mock} +} + +// BeginNodeSaleTx provides a mock function with given fields: ctx +func (_m *NodeSaleDataGatewayWithTx) BeginNodeSaleTx(ctx context.Context) (datagateway.NodeSaleDataGatewayWithTx, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for BeginNodeSaleTx") + } + + var r0 datagateway.NodeSaleDataGatewayWithTx + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (datagateway.NodeSaleDataGatewayWithTx, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) datagateway.NodeSaleDataGatewayWithTx); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(datagateway.NodeSaleDataGatewayWithTx) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BeginNodeSaleTx' +type NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call struct { + *mock.Call +} + +// BeginNodeSaleTx is a helper method to define mock.On call +// - ctx context.Context +func (_e *NodeSaleDataGatewayWithTx_Expecter) BeginNodeSaleTx(ctx interface{}) *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call { + return &NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call{Call: _e.mock.On("BeginNodeSaleTx", ctx)} +} + +func (_c *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call) Run(run 
func(ctx context.Context)) *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call) Return(_a0 datagateway.NodeSaleDataGatewayWithTx, _a1 error) *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call) RunAndReturn(run func(context.Context) (datagateway.NodeSaleDataGatewayWithTx, error)) *NodeSaleDataGatewayWithTx_BeginNodeSaleTx_Call { + _c.Call.Return(run) + return _c +} + +// ClearDelegate provides a mock function with given fields: ctx +func (_m *NodeSaleDataGatewayWithTx) ClearDelegate(ctx context.Context) (int64, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for ClearDelegate") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (int64, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) int64); ok { + r0 = rf(ctx) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_ClearDelegate_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ClearDelegate' +type NodeSaleDataGatewayWithTx_ClearDelegate_Call struct { + *mock.Call +} + +// ClearDelegate is a helper method to define mock.On call +// - ctx context.Context +func (_e *NodeSaleDataGatewayWithTx_Expecter) ClearDelegate(ctx interface{}) *NodeSaleDataGatewayWithTx_ClearDelegate_Call { + return &NodeSaleDataGatewayWithTx_ClearDelegate_Call{Call: _e.mock.On("ClearDelegate", ctx)} +} + +func (_c *NodeSaleDataGatewayWithTx_ClearDelegate_Call) Run(run func(ctx context.Context)) *NodeSaleDataGatewayWithTx_ClearDelegate_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_ClearDelegate_Call) Return(_a0 int64, _a1 error) *NodeSaleDataGatewayWithTx_ClearDelegate_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_ClearDelegate_Call) RunAndReturn(run func(context.Context) (int64, error)) *NodeSaleDataGatewayWithTx_ClearDelegate_Call { + _c.Call.Return(run) + return _c +} + +// Commit provides a mock function with given fields: ctx +func (_m *NodeSaleDataGatewayWithTx) Commit(ctx context.Context) error { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for Commit") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(ctx) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NodeSaleDataGatewayWithTx_Commit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Commit' +type NodeSaleDataGatewayWithTx_Commit_Call struct { + *mock.Call +} + +// Commit is a helper method to define mock.On call +// - ctx context.Context +func (_e *NodeSaleDataGatewayWithTx_Expecter) Commit(ctx interface{}) *NodeSaleDataGatewayWithTx_Commit_Call { + return &NodeSaleDataGatewayWithTx_Commit_Call{Call: _e.mock.On("Commit", ctx)} +} + +func (_c *NodeSaleDataGatewayWithTx_Commit_Call) Run(run func(ctx context.Context)) *NodeSaleDataGatewayWithTx_Commit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c 
*NodeSaleDataGatewayWithTx_Commit_Call) Return(_a0 error) *NodeSaleDataGatewayWithTx_Commit_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_Commit_Call) RunAndReturn(run func(context.Context) error) *NodeSaleDataGatewayWithTx_Commit_Call { + _c.Call.Return(run) + return _c +} + +// CreateBlock provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) CreateBlock(ctx context.Context, arg entity.Block) error { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for CreateBlock") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, entity.Block) error); ok { + r0 = rf(ctx, arg) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NodeSaleDataGatewayWithTx_CreateBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateBlock' +type NodeSaleDataGatewayWithTx_CreateBlock_Call struct { + *mock.Call +} + +// CreateBlock is a helper method to define mock.On call +// - ctx context.Context +// - arg entity.Block +func (_e *NodeSaleDataGatewayWithTx_Expecter) CreateBlock(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_CreateBlock_Call { + return &NodeSaleDataGatewayWithTx_CreateBlock_Call{Call: _e.mock.On("CreateBlock", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_CreateBlock_Call) Run(run func(ctx context.Context, arg entity.Block)) *NodeSaleDataGatewayWithTx_CreateBlock_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(entity.Block)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateBlock_Call) Return(_a0 error) *NodeSaleDataGatewayWithTx_CreateBlock_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateBlock_Call) RunAndReturn(run func(context.Context, entity.Block) error) *NodeSaleDataGatewayWithTx_CreateBlock_Call { + _c.Call.Return(run) + return _c +} + +// CreateEvent provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) CreateEvent(ctx context.Context, arg entity.NodeSaleEvent) error { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for CreateEvent") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, entity.NodeSaleEvent) error); ok { + r0 = rf(ctx, arg) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NodeSaleDataGatewayWithTx_CreateEvent_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateEvent' +type NodeSaleDataGatewayWithTx_CreateEvent_Call struct { + *mock.Call +} + +// CreateEvent is a helper method to define mock.On call +// - ctx context.Context +// - arg entity.NodeSaleEvent +func (_e *NodeSaleDataGatewayWithTx_Expecter) CreateEvent(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_CreateEvent_Call { + return &NodeSaleDataGatewayWithTx_CreateEvent_Call{Call: _e.mock.On("CreateEvent", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_CreateEvent_Call) Run(run func(ctx context.Context, arg entity.NodeSaleEvent)) *NodeSaleDataGatewayWithTx_CreateEvent_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(entity.NodeSaleEvent)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateEvent_Call) Return(_a0 error) *NodeSaleDataGatewayWithTx_CreateEvent_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateEvent_Call) RunAndReturn(run func(context.Context, 
entity.NodeSaleEvent) error) *NodeSaleDataGatewayWithTx_CreateEvent_Call { + _c.Call.Return(run) + return _c +} + +// CreateNode provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) CreateNode(ctx context.Context, arg entity.Node) error { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for CreateNode") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, entity.Node) error); ok { + r0 = rf(ctx, arg) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NodeSaleDataGatewayWithTx_CreateNode_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateNode' +type NodeSaleDataGatewayWithTx_CreateNode_Call struct { + *mock.Call +} + +// CreateNode is a helper method to define mock.On call +// - ctx context.Context +// - arg entity.Node +func (_e *NodeSaleDataGatewayWithTx_Expecter) CreateNode(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_CreateNode_Call { + return &NodeSaleDataGatewayWithTx_CreateNode_Call{Call: _e.mock.On("CreateNode", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_CreateNode_Call) Run(run func(ctx context.Context, arg entity.Node)) *NodeSaleDataGatewayWithTx_CreateNode_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(entity.Node)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateNode_Call) Return(_a0 error) *NodeSaleDataGatewayWithTx_CreateNode_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateNode_Call) RunAndReturn(run func(context.Context, entity.Node) error) *NodeSaleDataGatewayWithTx_CreateNode_Call { + _c.Call.Return(run) + return _c +} + +// CreateNodeSale provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) CreateNodeSale(ctx context.Context, arg entity.NodeSale) error { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for CreateNodeSale") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, entity.NodeSale) error); ok { + r0 = rf(ctx, arg) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NodeSaleDataGatewayWithTx_CreateNodeSale_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateNodeSale' +type NodeSaleDataGatewayWithTx_CreateNodeSale_Call struct { + *mock.Call +} + +// CreateNodeSale is a helper method to define mock.On call +// - ctx context.Context +// - arg entity.NodeSale +func (_e *NodeSaleDataGatewayWithTx_Expecter) CreateNodeSale(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_CreateNodeSale_Call { + return &NodeSaleDataGatewayWithTx_CreateNodeSale_Call{Call: _e.mock.On("CreateNodeSale", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_CreateNodeSale_Call) Run(run func(ctx context.Context, arg entity.NodeSale)) *NodeSaleDataGatewayWithTx_CreateNodeSale_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(entity.NodeSale)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateNodeSale_Call) Return(_a0 error) *NodeSaleDataGatewayWithTx_CreateNodeSale_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_CreateNodeSale_Call) RunAndReturn(run func(context.Context, entity.NodeSale) error) *NodeSaleDataGatewayWithTx_CreateNodeSale_Call { + _c.Call.Return(run) + return _c +} + +// GetBlock provides a mock function with given fields: ctx, blockHeight +func (_m 
*NodeSaleDataGatewayWithTx) GetBlock(ctx context.Context, blockHeight int64) (*entity.Block, error) { + ret := _m.Called(ctx, blockHeight) + + if len(ret) == 0 { + panic("no return value specified for GetBlock") + } + + var r0 *entity.Block + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) (*entity.Block, error)); ok { + return rf(ctx, blockHeight) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) *entity.Block); ok { + r0 = rf(ctx, blockHeight) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*entity.Block) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, blockHeight) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetBlock' +type NodeSaleDataGatewayWithTx_GetBlock_Call struct { + *mock.Call +} + +// GetBlock is a helper method to define mock.On call +// - ctx context.Context +// - blockHeight int64 +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetBlock(ctx interface{}, blockHeight interface{}) *NodeSaleDataGatewayWithTx_GetBlock_Call { + return &NodeSaleDataGatewayWithTx_GetBlock_Call{Call: _e.mock.On("GetBlock", ctx, blockHeight)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetBlock_Call) Run(run func(ctx context.Context, blockHeight int64)) *NodeSaleDataGatewayWithTx_GetBlock_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetBlock_Call) Return(_a0 *entity.Block, _a1 error) *NodeSaleDataGatewayWithTx_GetBlock_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetBlock_Call) RunAndReturn(run func(context.Context, int64) (*entity.Block, error)) *NodeSaleDataGatewayWithTx_GetBlock_Call { + _c.Call.Return(run) + return _c +} + +// GetEventsByWallet provides a mock function with given fields: ctx, walletAddress +func (_m *NodeSaleDataGatewayWithTx) GetEventsByWallet(ctx context.Context, walletAddress string) ([]entity.NodeSaleEvent, error) { + ret := _m.Called(ctx, walletAddress) + + if len(ret) == 0 { + panic("no return value specified for GetEventsByWallet") + } + + var r0 []entity.NodeSaleEvent + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) ([]entity.NodeSaleEvent, error)); ok { + return rf(ctx, walletAddress) + } + if rf, ok := ret.Get(0).(func(context.Context, string) []entity.NodeSaleEvent); ok { + r0 = rf(ctx, walletAddress) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]entity.NodeSaleEvent) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, walletAddress) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetEventsByWallet_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEventsByWallet' +type NodeSaleDataGatewayWithTx_GetEventsByWallet_Call struct { + *mock.Call +} + +// GetEventsByWallet is a helper method to define mock.On call +// - ctx context.Context +// - walletAddress string +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetEventsByWallet(ctx interface{}, walletAddress interface{}) *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call { + return &NodeSaleDataGatewayWithTx_GetEventsByWallet_Call{Call: _e.mock.On("GetEventsByWallet", ctx, walletAddress)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call) Run(run func(ctx context.Context, 
walletAddress string)) *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call) Return(_a0 []entity.NodeSaleEvent, _a1 error) *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call) RunAndReturn(run func(context.Context, string) ([]entity.NodeSaleEvent, error)) *NodeSaleDataGatewayWithTx_GetEventsByWallet_Call { + _c.Call.Return(run) + return _c +} + +// GetLastProcessedBlock provides a mock function with given fields: ctx +func (_m *NodeSaleDataGatewayWithTx) GetLastProcessedBlock(ctx context.Context) (*entity.Block, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for GetLastProcessedBlock") + } + + var r0 *entity.Block + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (*entity.Block, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) *entity.Block); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*entity.Block) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetLastProcessedBlock' +type NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call struct { + *mock.Call +} + +// GetLastProcessedBlock is a helper method to define mock.On call +// - ctx context.Context +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetLastProcessedBlock(ctx interface{}) *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call { + return &NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call{Call: _e.mock.On("GetLastProcessedBlock", ctx)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call) Run(run func(ctx context.Context)) *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call) Return(_a0 *entity.Block, _a1 error) *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call) RunAndReturn(run func(context.Context) (*entity.Block, error)) *NodeSaleDataGatewayWithTx_GetLastProcessedBlock_Call { + _c.Call.Return(run) + return _c +} + +// GetNodeCountByTierIndex provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) GetNodeCountByTierIndex(ctx context.Context, arg datagateway.GetNodeCountByTierIndexParams) ([]datagateway.GetNodeCountByTierIndexRow, error) { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for GetNodeCountByTierIndex") + } + + var r0 []datagateway.GetNodeCountByTierIndexRow + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodeCountByTierIndexParams) ([]datagateway.GetNodeCountByTierIndexRow, error)); ok { + return rf(ctx, arg) + } + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodeCountByTierIndexParams) []datagateway.GetNodeCountByTierIndexRow); ok { + r0 = rf(ctx, arg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]datagateway.GetNodeCountByTierIndexRow) + } + } + + if rf, ok := 
ret.Get(1).(func(context.Context, datagateway.GetNodeCountByTierIndexParams) error); ok { + r1 = rf(ctx, arg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetNodeCountByTierIndex' +type NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call struct { + *mock.Call +} + +// GetNodeCountByTierIndex is a helper method to define mock.On call +// - ctx context.Context +// - arg datagateway.GetNodeCountByTierIndexParams +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetNodeCountByTierIndex(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call { + return &NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call{Call: _e.mock.On("GetNodeCountByTierIndex", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call) Run(run func(ctx context.Context, arg datagateway.GetNodeCountByTierIndexParams)) *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(datagateway.GetNodeCountByTierIndexParams)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call) Return(_a0 []datagateway.GetNodeCountByTierIndexRow, _a1 error) *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call) RunAndReturn(run func(context.Context, datagateway.GetNodeCountByTierIndexParams) ([]datagateway.GetNodeCountByTierIndexRow, error)) *NodeSaleDataGatewayWithTx_GetNodeCountByTierIndex_Call { + _c.Call.Return(run) + return _c +} + +// GetNodeSale provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) GetNodeSale(ctx context.Context, arg datagateway.GetNodeSaleParams) ([]entity.NodeSale, error) { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for GetNodeSale") + } + + var r0 []entity.NodeSale + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodeSaleParams) ([]entity.NodeSale, error)); ok { + return rf(ctx, arg) + } + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodeSaleParams) []entity.NodeSale); ok { + r0 = rf(ctx, arg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]entity.NodeSale) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, datagateway.GetNodeSaleParams) error); ok { + r1 = rf(ctx, arg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetNodeSale_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetNodeSale' +type NodeSaleDataGatewayWithTx_GetNodeSale_Call struct { + *mock.Call +} + +// GetNodeSale is a helper method to define mock.On call +// - ctx context.Context +// - arg datagateway.GetNodeSaleParams +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetNodeSale(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_GetNodeSale_Call { + return &NodeSaleDataGatewayWithTx_GetNodeSale_Call{Call: _e.mock.On("GetNodeSale", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodeSale_Call) Run(run func(ctx context.Context, arg datagateway.GetNodeSaleParams)) *NodeSaleDataGatewayWithTx_GetNodeSale_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(datagateway.GetNodeSaleParams)) + }) + return _c +} + +func (_c 
*NodeSaleDataGatewayWithTx_GetNodeSale_Call) Return(_a0 []entity.NodeSale, _a1 error) *NodeSaleDataGatewayWithTx_GetNodeSale_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodeSale_Call) RunAndReturn(run func(context.Context, datagateway.GetNodeSaleParams) ([]entity.NodeSale, error)) *NodeSaleDataGatewayWithTx_GetNodeSale_Call { + _c.Call.Return(run) + return _c +} + +// GetNodesByIds provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) GetNodesByIds(ctx context.Context, arg datagateway.GetNodesByIdsParams) ([]entity.Node, error) { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for GetNodesByIds") + } + + var r0 []entity.Node + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodesByIdsParams) ([]entity.Node, error)); ok { + return rf(ctx, arg) + } + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodesByIdsParams) []entity.Node); ok { + r0 = rf(ctx, arg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]entity.Node) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, datagateway.GetNodesByIdsParams) error); ok { + r1 = rf(ctx, arg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetNodesByIds_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetNodesByIds' +type NodeSaleDataGatewayWithTx_GetNodesByIds_Call struct { + *mock.Call +} + +// GetNodesByIds is a helper method to define mock.On call +// - ctx context.Context +// - arg datagateway.GetNodesByIdsParams +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetNodesByIds(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_GetNodesByIds_Call { + return &NodeSaleDataGatewayWithTx_GetNodesByIds_Call{Call: _e.mock.On("GetNodesByIds", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByIds_Call) Run(run func(ctx context.Context, arg datagateway.GetNodesByIdsParams)) *NodeSaleDataGatewayWithTx_GetNodesByIds_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(datagateway.GetNodesByIdsParams)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByIds_Call) Return(_a0 []entity.Node, _a1 error) *NodeSaleDataGatewayWithTx_GetNodesByIds_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByIds_Call) RunAndReturn(run func(context.Context, datagateway.GetNodesByIdsParams) ([]entity.Node, error)) *NodeSaleDataGatewayWithTx_GetNodesByIds_Call { + _c.Call.Return(run) + return _c +} + +// GetNodesByOwner provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) GetNodesByOwner(ctx context.Context, arg datagateway.GetNodesByOwnerParams) ([]entity.Node, error) { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for GetNodesByOwner") + } + + var r0 []entity.Node + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodesByOwnerParams) ([]entity.Node, error)); ok { + return rf(ctx, arg) + } + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodesByOwnerParams) []entity.Node); ok { + r0 = rf(ctx, arg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]entity.Node) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, datagateway.GetNodesByOwnerParams) error); ok { + r1 = rf(ctx, arg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// 
NodeSaleDataGatewayWithTx_GetNodesByOwner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetNodesByOwner' +type NodeSaleDataGatewayWithTx_GetNodesByOwner_Call struct { + *mock.Call +} + +// GetNodesByOwner is a helper method to define mock.On call +// - ctx context.Context +// - arg datagateway.GetNodesByOwnerParams +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetNodesByOwner(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call { + return &NodeSaleDataGatewayWithTx_GetNodesByOwner_Call{Call: _e.mock.On("GetNodesByOwner", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call) Run(run func(ctx context.Context, arg datagateway.GetNodesByOwnerParams)) *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(datagateway.GetNodesByOwnerParams)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call) Return(_a0 []entity.Node, _a1 error) *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call) RunAndReturn(run func(context.Context, datagateway.GetNodesByOwnerParams) ([]entity.Node, error)) *NodeSaleDataGatewayWithTx_GetNodesByOwner_Call { + _c.Call.Return(run) + return _c +} + +// GetNodesByPubkey provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) GetNodesByPubkey(ctx context.Context, arg datagateway.GetNodesByPubkeyParams) ([]entity.Node, error) { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for GetNodesByPubkey") + } + + var r0 []entity.Node + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodesByPubkeyParams) ([]entity.Node, error)); ok { + return rf(ctx, arg) + } + if rf, ok := ret.Get(0).(func(context.Context, datagateway.GetNodesByPubkeyParams) []entity.Node); ok { + r0 = rf(ctx, arg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]entity.Node) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, datagateway.GetNodesByPubkeyParams) error); ok { + r1 = rf(ctx, arg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetNodesByPubkey' +type NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call struct { + *mock.Call +} + +// GetNodesByPubkey is a helper method to define mock.On call +// - ctx context.Context +// - arg datagateway.GetNodesByPubkeyParams +func (_e *NodeSaleDataGatewayWithTx_Expecter) GetNodesByPubkey(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call { + return &NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call{Call: _e.mock.On("GetNodesByPubkey", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call) Run(run func(ctx context.Context, arg datagateway.GetNodesByPubkeyParams)) *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(datagateway.GetNodesByPubkeyParams)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call) Return(_a0 []entity.Node, _a1 error) *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call) RunAndReturn(run func(context.Context, 
datagateway.GetNodesByPubkeyParams) ([]entity.Node, error)) *NodeSaleDataGatewayWithTx_GetNodesByPubkey_Call { + _c.Call.Return(run) + return _c +} + +// RemoveBlockFrom provides a mock function with given fields: ctx, fromBlock +func (_m *NodeSaleDataGatewayWithTx) RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error) { + ret := _m.Called(ctx, fromBlock) + + if len(ret) == 0 { + panic("no return value specified for RemoveBlockFrom") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) (int64, error)); ok { + return rf(ctx, fromBlock) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) int64); ok { + r0 = rf(ctx, fromBlock) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, fromBlock) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveBlockFrom' +type NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call struct { + *mock.Call +} + +// RemoveBlockFrom is a helper method to define mock.On call +// - ctx context.Context +// - fromBlock int64 +func (_e *NodeSaleDataGatewayWithTx_Expecter) RemoveBlockFrom(ctx interface{}, fromBlock interface{}) *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call { + return &NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call{Call: _e.mock.On("RemoveBlockFrom", ctx, fromBlock)} +} + +func (_c *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call) Run(run func(ctx context.Context, fromBlock int64)) *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call) Return(_a0 int64, _a1 error) *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call) RunAndReturn(run func(context.Context, int64) (int64, error)) *NodeSaleDataGatewayWithTx_RemoveBlockFrom_Call { + _c.Call.Return(run) + return _c +} + +// RemoveEventsFromBlock provides a mock function with given fields: ctx, fromBlock +func (_m *NodeSaleDataGatewayWithTx) RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error) { + ret := _m.Called(ctx, fromBlock) + + if len(ret) == 0 { + panic("no return value specified for RemoveEventsFromBlock") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) (int64, error)); ok { + return rf(ctx, fromBlock) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) int64); ok { + r0 = rf(ctx, fromBlock) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, fromBlock) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveEventsFromBlock' +type NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call struct { + *mock.Call +} + +// RemoveEventsFromBlock is a helper method to define mock.On call +// - ctx context.Context +// - fromBlock int64 +func (_e *NodeSaleDataGatewayWithTx_Expecter) RemoveEventsFromBlock(ctx interface{}, fromBlock interface{}) *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call { + return &NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call{Call: 
_e.mock.On("RemoveEventsFromBlock", ctx, fromBlock)} +} + +func (_c *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call) Run(run func(ctx context.Context, fromBlock int64)) *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int64)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call) Return(_a0 int64, _a1 error) *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call) RunAndReturn(run func(context.Context, int64) (int64, error)) *NodeSaleDataGatewayWithTx_RemoveEventsFromBlock_Call { + _c.Call.Return(run) + return _c +} + +// Rollback provides a mock function with given fields: ctx +func (_m *NodeSaleDataGatewayWithTx) Rollback(ctx context.Context) error { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for Rollback") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(ctx) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NodeSaleDataGatewayWithTx_Rollback_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Rollback' +type NodeSaleDataGatewayWithTx_Rollback_Call struct { + *mock.Call +} + +// Rollback is a helper method to define mock.On call +// - ctx context.Context +func (_e *NodeSaleDataGatewayWithTx_Expecter) Rollback(ctx interface{}) *NodeSaleDataGatewayWithTx_Rollback_Call { + return &NodeSaleDataGatewayWithTx_Rollback_Call{Call: _e.mock.On("Rollback", ctx)} +} + +func (_c *NodeSaleDataGatewayWithTx_Rollback_Call) Run(run func(ctx context.Context)) *NodeSaleDataGatewayWithTx_Rollback_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_Rollback_Call) Return(_a0 error) *NodeSaleDataGatewayWithTx_Rollback_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_Rollback_Call) RunAndReturn(run func(context.Context) error) *NodeSaleDataGatewayWithTx_Rollback_Call { + _c.Call.Return(run) + return _c +} + +// SetDelegates provides a mock function with given fields: ctx, arg +func (_m *NodeSaleDataGatewayWithTx) SetDelegates(ctx context.Context, arg datagateway.SetDelegatesParams) (int64, error) { + ret := _m.Called(ctx, arg) + + if len(ret) == 0 { + panic("no return value specified for SetDelegates") + } + + var r0 int64 + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, datagateway.SetDelegatesParams) (int64, error)); ok { + return rf(ctx, arg) + } + if rf, ok := ret.Get(0).(func(context.Context, datagateway.SetDelegatesParams) int64); ok { + r0 = rf(ctx, arg) + } else { + r0 = ret.Get(0).(int64) + } + + if rf, ok := ret.Get(1).(func(context.Context, datagateway.SetDelegatesParams) error); ok { + r1 = rf(ctx, arg) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NodeSaleDataGatewayWithTx_SetDelegates_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetDelegates' +type NodeSaleDataGatewayWithTx_SetDelegates_Call struct { + *mock.Call +} + +// SetDelegates is a helper method to define mock.On call +// - ctx context.Context +// - arg datagateway.SetDelegatesParams +func (_e *NodeSaleDataGatewayWithTx_Expecter) SetDelegates(ctx interface{}, arg interface{}) *NodeSaleDataGatewayWithTx_SetDelegates_Call { + return &NodeSaleDataGatewayWithTx_SetDelegates_Call{Call: 
_e.mock.On("SetDelegates", ctx, arg)} +} + +func (_c *NodeSaleDataGatewayWithTx_SetDelegates_Call) Run(run func(ctx context.Context, arg datagateway.SetDelegatesParams)) *NodeSaleDataGatewayWithTx_SetDelegates_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(datagateway.SetDelegatesParams)) + }) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_SetDelegates_Call) Return(_a0 int64, _a1 error) *NodeSaleDataGatewayWithTx_SetDelegates_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *NodeSaleDataGatewayWithTx_SetDelegates_Call) RunAndReturn(run func(context.Context, datagateway.SetDelegatesParams) (int64, error)) *NodeSaleDataGatewayWithTx_SetDelegates_Call { + _c.Call.Return(run) + return _c +} + +// NewNodeSaleDataGatewayWithTx creates a new instance of NodeSaleDataGatewayWithTx. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewNodeSaleDataGatewayWithTx(t interface { + mock.TestingT + Cleanup(func()) +}) *NodeSaleDataGatewayWithTx { + mock := &NodeSaleDataGatewayWithTx{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/modules/nodesale/datagateway/nodesale.go b/modules/nodesale/datagateway/nodesale.go new file mode 100644 index 0000000..95f4e3a --- /dev/null +++ b/modules/nodesale/datagateway/nodesale.go @@ -0,0 +1,76 @@ +package datagateway + +import ( + "context" + + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" +) + +type NodeSaleDataGateway interface { + BeginNodeSaleTx(ctx context.Context) (NodeSaleDataGatewayWithTx, error) + CreateBlock(ctx context.Context, arg entity.Block) error + GetBlock(ctx context.Context, blockHeight int64) (*entity.Block, error) + GetLastProcessedBlock(ctx context.Context) (*entity.Block, error) + RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error) + RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error) + ClearDelegate(ctx context.Context) (int64, error) + GetNodesByIds(ctx context.Context, arg GetNodesByIdsParams) ([]entity.Node, error) + CreateEvent(ctx context.Context, arg entity.NodeSaleEvent) error + SetDelegates(ctx context.Context, arg SetDelegatesParams) (int64, error) + CreateNodeSale(ctx context.Context, arg entity.NodeSale) error + GetNodeSale(ctx context.Context, arg GetNodeSaleParams) ([]entity.NodeSale, error) + GetNodesByOwner(ctx context.Context, arg GetNodesByOwnerParams) ([]entity.Node, error) + CreateNode(ctx context.Context, arg entity.Node) error + GetNodeCountByTierIndex(ctx context.Context, arg GetNodeCountByTierIndexParams) ([]GetNodeCountByTierIndexRow, error) + GetNodesByPubkey(ctx context.Context, arg GetNodesByPubkeyParams) ([]entity.Node, error) + GetEventsByWallet(ctx context.Context, walletAddress string) ([]entity.NodeSaleEvent, error) +} + +type NodeSaleDataGatewayWithTx interface { + NodeSaleDataGateway + Tx +} + +type GetNodesByIdsParams struct { + SaleBlock uint64 + SaleTxIndex uint32 + NodeIds []uint32 +} + +type SetDelegatesParams struct { + SaleBlock uint64 + SaleTxIndex int32 + Delegatee string + DelegateTxHash string + NodeIds []uint32 +} + +type GetNodeSaleParams struct { + BlockHeight uint64 + TxIndex uint32 +} + +type GetNodesByOwnerParams struct { + SaleBlock uint64 + SaleTxIndex uint32 + OwnerPublicKey string +} + +type GetNodeCountByTierIndexParams struct { + SaleBlock uint64 + SaleTxIndex uint32 + FromTier uint32 + 
ToTier uint32 +} + +type GetNodeCountByTierIndexRow struct { + TierIndex int32 + Count int64 +} + +type GetNodesByPubkeyParams struct { + SaleBlock int64 + SaleTxIndex int32 + OwnerPublicKey string + DelegatedTo string +} diff --git a/modules/nodesale/datagateway/tx.go b/modules/nodesale/datagateway/tx.go new file mode 100644 index 0000000..56455f6 --- /dev/null +++ b/modules/nodesale/datagateway/tx.go @@ -0,0 +1,12 @@ +package datagateway + +import "context" + +type Tx interface { + // Commit commits the DB transaction. All changes made after Begin() will be persisted. Calling Commit() will close the current transaction. + // If Commit() is called without a prior Begin(), it must be a no-op. + Commit(ctx context.Context) error + // Rollback rolls back the DB transaction. All changes made after Begin() will be discarded. + // Rollback() must be safe to call even if no transaction is active. Hence, a defer Rollback() is safe, even if Commit() was called prior with non-error conditions. + Rollback(ctx context.Context) error +} diff --git a/modules/nodesale/delegate.go b/modules/nodesale/delegate.go new file mode 100644 index 0000000..528514e --- /dev/null +++ b/modules/nodesale/delegate.go @@ -0,0 +1,61 @@ +package nodesale + +import ( + "context" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/core/types" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + delegatevalidator "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator/delegate" +) + +func (p *Processor) ProcessDelegate(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, block *types.Block, event NodeSaleEvent) error { + validator := delegatevalidator.New() + delegate := event.EventMessage.Delegate + + _, nodes, err := validator.NodesExist(ctx, qtx, delegate.DeployID, delegate.NodeIDs) + if err != nil { + return errors.Wrap(err, "Cannot query") + } + + for _, node := range nodes { + valid := validator.EqualXonlyPublicKey(node.OwnerPublicKey, event.TxPubkey) + if !valid { + break + } + } + + err = qtx.CreateEvent(ctx, entity.NodeSaleEvent{ + TxHash: event.Transaction.TxHash.String(), + TxIndex: int32(event.Transaction.Index), + Action: int32(event.EventMessage.Action), + RawMessage: event.RawData, + ParsedMessage: event.EventJson, + BlockTimestamp: block.Header.Timestamp, + BlockHash: event.Transaction.BlockHash.String(), + BlockHeight: event.Transaction.BlockHeight, + Valid: validator.Valid, + WalletAddress: p.PubkeyToPkHashAddress(event.TxPubkey).EncodeAddress(), + Metadata: nil, + Reason: validator.Reason, + }) + if err != nil { + return errors.Wrap(err, "Failed to insert event") + } + + if validator.Valid { + _, err = qtx.SetDelegates(ctx, datagateway.SetDelegatesParams{ + SaleBlock: delegate.DeployID.Block, + SaleTxIndex: int32(delegate.DeployID.TxIndex), + Delegatee: delegate.DelegateePublicKey, + DelegateTxHash: event.Transaction.TxHash.String(), + NodeIds: delegate.NodeIDs, + }) + if err != nil { + return errors.Wrap(err, "Failed to set delegate") + } + } + + return nil +} diff --git a/modules/nodesale/delegate_test.go b/modules/nodesale/delegate_test.go new file mode 100644 index 0000000..fff91fc --- /dev/null +++ b/modules/nodesale/delegate_test.go @@ -0,0 +1,84 @@ +package nodesale + +import ( + "context" + "encoding/hex" + "testing" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/gaze-network/indexer-network/common" + 
"github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" +) + +func TestDelegate(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + delegateePrivateKey, _ := btcec.NewPrivateKey() + delegateePubkeyHex := hex.EncodeToString(delegateePrivateKey.PubKey().SerializeCompressed()) + + delegateMessage := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_DELEGATE, + Delegate: &protobuf.ActionDelegate{ + DelegateePublicKey: delegateePubkeyHex, + NodeIDs: []uint32{9, 10}, + DeployID: &protobuf.ActionID{ + Block: uint64(testBlockHeight) - 2, + TxIndex: uint32(testTxIndex) - 2, + }, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "131313131313", "131313131313", 0, 0, delegateMessage) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == true + })).Return(nil) + + mockDgTx.EXPECT().GetNodesByIds(mock.Anything, datagateway.GetNodesByIdsParams{ + SaleBlock: delegateMessage.Delegate.DeployID.Block, + SaleTxIndex: delegateMessage.Delegate.DeployID.TxIndex, + NodeIds: []uint32{9, 10}, + }).Return([]entity.Node{ + { + SaleBlock: delegateMessage.Delegate.DeployID.Block, + SaleTxIndex: delegateMessage.Delegate.DeployID.TxIndex, + NodeID: 9, + TierIndex: 1, + DelegatedTo: "", + OwnerPublicKey: buyerPubkeyHex, + PurchaseTxHash: mock.Anything, + DelegateTxHash: "", + }, + { + SaleBlock: delegateMessage.Delegate.DeployID.Block, + SaleTxIndex: delegateMessage.Delegate.DeployID.TxIndex, + NodeID: 10, + TierIndex: 2, + DelegatedTo: "", + OwnerPublicKey: buyerPubkeyHex, + PurchaseTxHash: mock.Anything, + DelegateTxHash: "", + }, + }, nil) + + mockDgTx.EXPECT().SetDelegates(mock.Anything, datagateway.SetDelegatesParams{ + SaleBlock: delegateMessage.Delegate.DeployID.Block, + SaleTxIndex: int32(delegateMessage.Delegate.DeployID.TxIndex), + Delegatee: delegateMessage.Delegate.DelegateePublicKey, + DelegateTxHash: event.Transaction.TxHash.String(), + NodeIds: delegateMessage.Delegate.NodeIDs, + }).Return(2, nil) + + err := p.ProcessDelegate(ctx, mockDgTx, block, event) + require.NoError(t, err) +} diff --git a/modules/nodesale/deploy.go b/modules/nodesale/deploy.go new file mode 100644 index 0000000..8cca0c3 --- /dev/null +++ b/modules/nodesale/deploy.go @@ -0,0 +1,67 @@ +package nodesale + +import ( + "context" + "time" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/core/types" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator" + "google.golang.org/protobuf/encoding/protojson" +) + +func (p *Processor) ProcessDeploy(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, block *types.Block, event NodeSaleEvent) error { + deploy := event.EventMessage.Deploy + + validator := validator.New() + + validator.EqualXonlyPublicKey(deploy.SellerPublicKey, 
event.TxPubkey) + + err := qtx.CreateEvent(ctx, entity.NodeSaleEvent{ + TxHash: event.Transaction.TxHash.String(), + TxIndex: int32(event.Transaction.Index), + Action: int32(event.EventMessage.Action), + RawMessage: event.RawData, + ParsedMessage: event.EventJson, + BlockTimestamp: block.Header.Timestamp, + BlockHash: event.Transaction.BlockHash.String(), + BlockHeight: event.Transaction.BlockHeight, + Valid: validator.Valid, + WalletAddress: p.PubkeyToPkHashAddress(event.TxPubkey).EncodeAddress(), + Metadata: nil, + Reason: validator.Reason, + }) + if err != nil { + return errors.Wrap(err, "Failed to insert event") + } + if validator.Valid { + tiers := make([][]byte, len(deploy.Tiers)) + for i, tier := range deploy.Tiers { + tierJson, err := protojson.Marshal(tier) + if err != nil { + return errors.Wrap(err, "Failed to parse tiers to json") + } + tiers[i] = tierJson + } + err = qtx.CreateNodeSale(ctx, entity.NodeSale{ + BlockHeight: uint64(event.Transaction.BlockHeight), + TxIndex: event.Transaction.Index, + Name: deploy.Name, + StartsAt: time.Unix(int64(deploy.StartsAt), 0), + EndsAt: time.Unix(int64(deploy.EndsAt), 0), + Tiers: tiers, + SellerPublicKey: deploy.SellerPublicKey, + MaxPerAddress: deploy.MaxPerAddress, + DeployTxHash: event.Transaction.TxHash.String(), + MaxDiscountPercentage: int32(deploy.MaxDiscountPercentage), + SellerWallet: deploy.SellerWallet, + }) + if err != nil { + return errors.Wrap(err, "Failed to insert NodeSale") + } + } + + return nil +} diff --git a/modules/nodesale/deploy_test.go b/modules/nodesale/deploy_test.go new file mode 100644 index 0000000..3b70086 --- /dev/null +++ b/modules/nodesale/deploy_test.go @@ -0,0 +1,139 @@ +package nodesale + +import ( + "context" + "encoding/hex" + "testing" + "time" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/gaze-network/indexer-network/common" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "github.com/samber/lo" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/encoding/protojson" +) + +func TestDeployInvalid(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + prvKey, err := btcec.NewPrivateKey() + require.NoError(t, err) + + strangerKey, err := btcec.NewPrivateKey() + require.NoError(t, err) + + strangerPubkeyHex := hex.EncodeToString(strangerKey.PubKey().SerializeCompressed()) + + sellerWallet := p.PubkeyToPkHashAddress(prvKey.PubKey()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_DEPLOY, + Deploy: &protobuf.ActionDeploy{ + Name: t.Name(), + StartsAt: 100, + EndsAt: 200, + Tiers: []*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 5, + MaxPerAddress: 100, + }, + }, + SellerPublicKey: strangerPubkeyHex, + MaxPerAddress: 100, + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + } + + event, block := assembleTestEvent(prvKey, "0101010101", "0101010101", 0, 0, message) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false + })).Return(nil) + + err = p.ProcessDeploy(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNodeSale") +} + 
+func TestDeployValid(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + privateKey, err := btcec.NewPrivateKey() + require.NoError(t, err) + + pubkeyHex := hex.EncodeToString(privateKey.PubKey().SerializeCompressed()) + + sellerWallet := p.PubkeyToPkHashAddress(privateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_DEPLOY, + Deploy: &protobuf.ActionDeploy{ + Name: t.Name(), + StartsAt: uint32(startAt.UTC().Unix()), + EndsAt: uint32(endAt.UTC().Unix()), + Tiers: []*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 5, + MaxPerAddress: 100, + }, + }, + SellerPublicKey: pubkeyHex, + MaxPerAddress: 100, + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + } + + event, block := assembleTestEvent(privateKey, "0202020202", "0202020202", 0, 0, message) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == true + })).Return(nil) + + tiers := lo.Map(message.Deploy.Tiers, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().CreateNodeSale(mock.Anything, entity.NodeSale{ + BlockHeight: uint64(event.Transaction.BlockHeight), + TxIndex: uint32(event.Transaction.Index), + Name: message.Deploy.Name, + StartsAt: time.Unix(int64(message.Deploy.StartsAt), 0), + EndsAt: time.Unix(int64(message.Deploy.EndsAt), 0), + Tiers: tiers, + SellerPublicKey: message.Deploy.SellerPublicKey, + MaxPerAddress: message.Deploy.MaxPerAddress, + DeployTxHash: event.Transaction.TxHash.String(), + MaxDiscountPercentage: int32(message.Deploy.MaxDiscountPercentage), + SellerWallet: message.Deploy.SellerWallet, + }).Return(nil) + + p.ProcessDeploy(ctx, mockDgTx, block, event) +} diff --git a/modules/nodesale/internal/entity/entity.go b/modules/nodesale/internal/entity/entity.go new file mode 100644 index 0000000..5275a15 --- /dev/null +++ b/modules/nodesale/internal/entity/entity.go @@ -0,0 +1,55 @@ +package entity + +import "time" + +type Block struct { + BlockHeight int64 + BlockHash string + Module string +} + +type Node struct { + SaleBlock uint64 + SaleTxIndex uint32 + NodeID uint32 + TierIndex int32 + DelegatedTo string + OwnerPublicKey string + PurchaseTxHash string + DelegateTxHash string +} + +type NodeSale struct { + BlockHeight uint64 + TxIndex uint32 + Name string + StartsAt time.Time + EndsAt time.Time + Tiers [][]byte + SellerPublicKey string + MaxPerAddress uint32 + DeployTxHash string + MaxDiscountPercentage int32 + SellerWallet string +} + +type NodeSaleEvent struct { + TxHash string + BlockHeight int64 + TxIndex int32 + WalletAddress string + Valid bool + Action int32 + RawMessage []byte + ParsedMessage []byte + BlockTimestamp time.Time + BlockHash string + Metadata *MetadataEventPurchase + Reason string +} + +type MetadataEventPurchase struct { + ExpectedTotalAmountDiscounted uint64 + ReportedTotalAmount uint64 + PaidTotalAmount uint64 +} diff --git a/modules/nodesale/internal/validator/delegate/validator.go b/modules/nodesale/internal/validator/delegate/validator.go new file mode 100644 index 0000000..4925dd3 --- /dev/null +++ b/modules/nodesale/internal/validator/delegate/validator.go @@ -0,0 +1,51 @@ +package 
delegate
+
+import (
+	"context"
+
+	"github.com/cockroachdb/errors"
+	"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
+	"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
+	"github.com/gaze-network/indexer-network/modules/nodesale/internal/validator"
+	"github.com/gaze-network/indexer-network/modules/nodesale/protobuf"
+)
+
+type DelegateValidator struct {
+	validator.Validator
+}
+
+func New() *DelegateValidator {
+	v := validator.New()
+	return &DelegateValidator{
+		Validator: *v,
+	}
+}
+
+func (v *DelegateValidator) NodesExist(
+	ctx context.Context,
+	qtx datagateway.NodeSaleDataGatewayWithTx,
+	deployId *protobuf.ActionID,
+	nodeIds []uint32,
+) (bool, []entity.Node, error) {
+	if !v.Valid {
+		return false, nil, nil
+	}
+
+	nodes, err := qtx.GetNodesByIds(ctx, datagateway.GetNodesByIdsParams{
+		SaleBlock: deployId.Block,
+		SaleTxIndex: deployId.TxIndex,
+		NodeIds: nodeIds,
+	})
+	if err != nil {
+		v.Valid = false
+		return v.Valid, nil, errors.Wrap(err, "Failed to get nodes")
+	}
+
+	if len(nodeIds) != len(nodes) {
+		v.Valid = false
+		return v.Valid, nil, nil
+	}
+
+	v.Valid = true
+	return v.Valid, nodes, nil
+}
diff --git a/modules/nodesale/internal/validator/errors.go b/modules/nodesale/internal/validator/errors.go
new file mode 100644
index 0000000..7e24b6a
--- /dev/null
+++ b/modules/nodesale/internal/validator/errors.go
@@ -0,0 +1,6 @@
+package validator
+
+const (
+	INVALID_PUBKEY_FORMAT = "Cannot parse public key"
+	INVALID_PUBKEY = "Invalid public key"
+)
diff --git a/modules/nodesale/internal/validator/purchase/errors.go b/modules/nodesale/internal/validator/purchase/errors.go
new file mode 100644
index 0000000..2f62fd4
--- /dev/null
+++ b/modules/nodesale/internal/validator/purchase/errors.go
@@ -0,0 +1,17 @@
+package purchase
+
+const (
+	DEPLOYID_NOT_FOUND = "Deploy ID not found."
+	PURCHASE_TIMEOUT = "Purchase timeout."
+	BLOCK_HEIGHT_TIMEOUT = "Block height over timeout block"
+	INVALID_SIGNATURE_FORMAT = "Cannot parse signature."
+	INVALID_SIGNATURE = "Invalid Signature."
+	INVALID_TIER_JSON = "Invalid Tier format"
+	INVALID_NODE_ID = "Invalid NodeId."
+	NODE_ALREADY_PURCHASED = "Some nodes have already been purchased."
+	INVALID_SELLER_ADDR_FORMAT = "Invalid seller address."
+	INVALID_PAYMENT = "Total amount paid is less than reported price"
+	INSUFFICIENT_FUND = "Insufficient funds"
+	OVER_LIMIT_PER_ADDR = "Purchase over limit per address."
+	OVER_LIMIT_PER_TIER = "Purchase over limit per tier."
+) diff --git a/modules/nodesale/internal/validator/purchase/validator.go b/modules/nodesale/internal/validator/purchase/validator.go new file mode 100644 index 0000000..277ec30 --- /dev/null +++ b/modules/nodesale/internal/validator/purchase/validator.go @@ -0,0 +1,283 @@ +package purchase + +import ( + "context" + "encoding/hex" + "slices" + "time" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/btcsuite/btcd/btcec/v2/ecdsa" + "github.com/btcsuite/btcd/chaincfg" + "github.com/btcsuite/btcd/chaincfg/chainhash" + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" +) + +type PurchaseValidator struct { + validator.Validator +} + +func New() *PurchaseValidator { + v := validator.New() + return &PurchaseValidator{ + Validator: *v, + } +} + +func (v *PurchaseValidator) NodeSaleExists(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, payload *protobuf.PurchasePayload) (bool, *entity.NodeSale, error) { + if !v.Valid { + return false, nil, nil + } + // check node existed + deploys, err := qtx.GetNodeSale(ctx, datagateway.GetNodeSaleParams{ + BlockHeight: payload.DeployID.Block, + TxIndex: payload.DeployID.TxIndex, + }) + if err != nil { + v.Valid = false + return v.Valid, nil, errors.Wrap(err, "Failed to Get NodeSale") + } + if len(deploys) < 1 { + v.Valid = false + v.Reason = DEPLOYID_NOT_FOUND + return v.Valid, nil, nil + } + v.Valid = true + return v.Valid, &deploys[0], nil +} + +func (v *PurchaseValidator) ValidTimestamp(deploy *entity.NodeSale, timestamp time.Time) bool { + if !v.Valid { + return false + } + if timestamp.Before(deploy.StartsAt) || + timestamp.After(deploy.EndsAt) { + v.Valid = false + v.Reason = PURCHASE_TIMEOUT + return v.Valid + } + v.Valid = true + return v.Valid +} + +func (v *PurchaseValidator) WithinTimeoutBlock(timeOutBlock uint64, blockHeight uint64) bool { + if !v.Valid { + return false + } + if timeOutBlock == 0 { + // No timeout + v.Valid = true + return v.Valid + } + if timeOutBlock < blockHeight { + v.Valid = false + v.Reason = BLOCK_HEIGHT_TIMEOUT + return v.Valid + } + v.Valid = true + return v.Valid +} + +func (v *PurchaseValidator) VerifySignature(purchase *protobuf.ActionPurchase, deploy *entity.NodeSale) bool { + if !v.Valid { + return false + } + payload := purchase.Payload + payloadBytes, _ := proto.Marshal(payload) + signatureBytes, _ := hex.DecodeString(purchase.SellerSignature) + signature, err := ecdsa.ParseSignature(signatureBytes) + if err != nil { + v.Valid = false + v.Reason = INVALID_SIGNATURE_FORMAT + return v.Valid + } + hash := chainhash.DoubleHashB(payloadBytes) + pubkeyBytes, _ := hex.DecodeString(deploy.SellerPublicKey) + pubKey, _ := btcec.ParsePubKey(pubkeyBytes) + verified := signature.Verify(hash[:], pubKey) + if !verified { + v.Valid = false + v.Reason = INVALID_SIGNATURE + return v.Valid + } + v.Valid = true + return v.Valid +} + +type TierMap struct { + Tiers []protobuf.Tier + BuyingTiersCount []uint32 + NodeIdToTier map[uint32]int32 +} + +func (v *PurchaseValidator) ValidTiers( + payload *protobuf.PurchasePayload, + deploy *entity.NodeSale, +) (bool, TierMap) { + if !v.Valid { + return false, TierMap{} + } + tiers := make([]protobuf.Tier, 
len(deploy.Tiers))
+	buyingTiersCount := make([]uint32, len(tiers))
+	nodeIdToTier := make(map[uint32]int32)
+
+	for i, tierJson := range deploy.Tiers {
+		tier := &tiers[i]
+		err := protojson.Unmarshal(tierJson, tier)
+		if err != nil {
+			v.Valid = false
+			v.Reason = INVALID_TIER_JSON
+			return v.Valid, TierMap{}
+		}
+	}
+
+	slices.Sort(payload.NodeIDs)
+
+	var currentTier int32 = -1
+	var tierSum uint32 = 0
+	for _, nodeId := range payload.NodeIDs {
+		for nodeId >= tierSum && currentTier < int32(len(tiers)-1) {
+			currentTier++
+			tierSum += tiers[currentTier].Limit
+		}
+		if nodeId < tierSum {
+			buyingTiersCount[currentTier]++
+			nodeIdToTier[nodeId] = currentTier
+		} else {
+			v.Valid = false
+			v.Reason = INVALID_NODE_ID
+			return false, TierMap{}
+		}
+	}
+	v.Valid = true
+	return v.Valid, TierMap{
+		Tiers: tiers,
+		BuyingTiersCount: buyingTiersCount,
+		NodeIdToTier: nodeIdToTier,
+	}
+}
+
+func (v *PurchaseValidator) ValidUnpurchasedNodes(
+	ctx context.Context,
+	qtx datagateway.NodeSaleDataGatewayWithTx,
+	payload *protobuf.PurchasePayload,
+) (bool, error) {
+	if !v.Valid {
+		return false, nil
+	}
+
+	// check that none of the requested node IDs have already been purchased
+	nodes, err := qtx.GetNodesByIds(ctx, datagateway.GetNodesByIdsParams{
+		SaleBlock: payload.DeployID.Block,
+		SaleTxIndex: payload.DeployID.TxIndex,
+		NodeIds: payload.NodeIDs,
+	})
+	if err != nil {
+		v.Valid = false
+		return v.Valid, errors.Wrap(err, "Failed to get nodes")
+	}
+	if len(nodes) > 0 {
+		v.Valid = false
+		v.Reason = NODE_ALREADY_PURCHASED
+		return false, nil
+	}
+	v.Valid = true
+	return true, nil
+}
+
+func (v *PurchaseValidator) ValidPaidAmount(
+	payload *protobuf.PurchasePayload,
+	deploy *entity.NodeSale,
+	txPaid uint64,
+	tiers []protobuf.Tier,
+	buyingTiersCount []uint32,
+	network *chaincfg.Params,
+) (bool, *entity.MetadataEventPurchase) {
+	if !v.Valid {
+		return false, nil
+	}
+
+	meta := entity.MetadataEventPurchase{}
+
+	meta.PaidTotalAmount = txPaid
+	meta.ReportedTotalAmount = uint64(payload.TotalAmountSat)
+	// the amount actually paid must be at least the reported total amount
+	if txPaid < uint64(payload.TotalAmountSat) {
+		v.Valid = false
+		v.Reason = INVALID_PAYMENT
+		return v.Valid, nil
+	}
+	// calculate total price in uint64 to avoid uint32 overflow
+	var totalPrice uint64 = 0
+	for i := 0; i < len(tiers); i++ {
+		totalPrice += uint64(buyingTiersCount[i]) * uint64(tiers[i].PriceSat)
+	}
+	// the reported amount must be at least the maximum-discounted total price, rounded to the nearest satoshi
+	maxDiscounted := totalPrice * (100 - uint64(deploy.MaxDiscountPercentage))
+	decimal := maxDiscounted % 100
+	maxDiscounted /= 100
+	if decimal >= 50 {
+		maxDiscounted++
+	}
+	meta.ExpectedTotalAmountDiscounted = maxDiscounted
+	if uint64(payload.TotalAmountSat) < maxDiscounted {
+		v.Valid = false
+		v.Reason = INSUFFICIENT_FUND
+		return v.Valid, nil
+	}
+	v.Valid = true
+	return v.Valid, &meta
+}
+
+func (v *PurchaseValidator) WithinLimit(
+	ctx context.Context,
+	qtx datagateway.NodeSaleDataGatewayWithTx,
+	payload *protobuf.PurchasePayload,
+	deploy *entity.NodeSale,
+	tiers []protobuf.Tier,
+	buyingTiersCount []uint32,
+) (bool, error) {
+	if !v.Valid {
+		return false, nil
+	}
+
+	// check the per-address limit:
+	// count nodes in this sale that the buyer already owns
+	buyerOwnedNodes, err := qtx.GetNodesByOwner(ctx, datagateway.GetNodesByOwnerParams{
+		SaleBlock: deploy.BlockHeight,
+		SaleTxIndex: deploy.TxIndex,
+		OwnerPublicKey: payload.BuyerPublicKey,
+	})
+	if err != nil {
+		v.Valid = false
+		return v.Valid, errors.Wrap(err, "Failed to GetNodesByOwner")
+	}
+	if len(buyerOwnedNodes)+len(payload.NodeIDs) > int(deploy.MaxPerAddress) {
+		v.Valid = false
v.Reason = OVER_LIMIT_PER_ADDR
+		return v.Valid, nil
+	}
+
+	// check the per-tier limit:
+	// count how many nodes the buyer already owns in each tier,
+	// then make sure the new purchase stays within each tier's MaxPerAddress
+	ownedTiersCount := make([]uint32, len(tiers))
+	for _, node := range buyerOwnedNodes {
+		ownedTiersCount[node.TierIndex]++
+	}
+	for i := 0; i < len(tiers); i++ {
+		if ownedTiersCount[i]+buyingTiersCount[i] > tiers[i].MaxPerAddress {
+			v.Valid = false
+			v.Reason = OVER_LIMIT_PER_TIER
+			return v.Valid, nil
+		}
+	}
+	v.Valid = true
+	return v.Valid, nil
+}
diff --git a/modules/nodesale/internal/validator/validator.go b/modules/nodesale/internal/validator/validator.go
new file mode 100644
index 0000000..c274b45
--- /dev/null
+++ b/modules/nodesale/internal/validator/validator.go
@@ -0,0 +1,46 @@
+package validator
+
+import (
+	"bytes"
+	"encoding/hex"
+
+	"github.com/btcsuite/btcd/btcec/v2"
+)
+
+type Validator struct {
+	Valid bool
+	Reason string
+}
+
+func New() *Validator {
+	return &Validator{
+		Valid: true,
+	}
+}
+
+func (v *Validator) EqualXonlyPublicKey(target string, expected *btcec.PublicKey) bool {
+	if !v.Valid {
+		return false
+	}
+	targetBytes, err := hex.DecodeString(target)
+	if err != nil {
+		v.Valid = false
+		v.Reason = INVALID_PUBKEY_FORMAT
+		return v.Valid
+	}
+
+	targetPubKey, err := btcec.ParsePubKey(targetBytes)
+	if err != nil {
+		v.Valid = false
+		v.Reason = INVALID_PUBKEY_FORMAT
+		return v.Valid
+	}
+	xOnlyTargetPubKey := btcec.ToSerialized(targetPubKey).SchnorrSerialized()
+	xOnlyExpectedPubKey := btcec.ToSerialized(expected).SchnorrSerialized()
+
+	v.Valid = bytes.Equal(xOnlyTargetPubKey[:], xOnlyExpectedPubKey[:])
+	if !v.Valid {
+		v.Reason = INVALID_PUBKEY
+	}
+	return v.Valid
+}
diff --git a/modules/nodesale/nodesale.go b/modules/nodesale/nodesale.go
new file mode 100644
index 0000000..ca686a8
--- /dev/null
+++ b/modules/nodesale/nodesale.go
@@ -0,0 +1,61 @@
+package nodesale
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/btcsuite/btcd/rpcclient"
+	"github.com/gaze-network/indexer-network/core/datasources"
+	"github.com/gaze-network/indexer-network/core/indexer"
+	"github.com/gaze-network/indexer-network/internal/config"
+	"github.com/gaze-network/indexer-network/internal/postgres"
+	"github.com/gaze-network/indexer-network/modules/nodesale/api/httphandler"
+	repository "github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres"
+	"github.com/gaze-network/indexer-network/pkg/logger"
+	"github.com/gofiber/fiber/v2"
+	"github.com/samber/do/v2"
+)
+
+var NODESALE_MAGIC = []byte{0x6e, 0x73, 0x6f, 0x70}
+
+const (
+	Version = "v0.0.1-alpha"
+)
+
+func New(injector do.Injector) (indexer.IndexerWorker, error) {
+	ctx := do.MustInvoke[context.Context](injector)
+	conf := do.MustInvoke[config.Config](injector)
+
+	btcClient := do.MustInvoke[*rpcclient.Client](injector)
+	datasource := datasources.NewBitcoinNode(btcClient)
+
+	pg, err := postgres.NewPool(ctx, conf.Modules.NodeSale.Postgres)
+	if err != nil {
+		return nil, fmt.Errorf("Can't create postgres connection: %w", err)
+	}
+	var cleanupFuncs []func(context.Context) error
+	cleanupFuncs = append(cleanupFuncs, func(ctx context.Context) error {
+		pg.Close()
+		return nil
+	})
+	repository := repository.NewRepository(pg)
+
+	processor := &Processor{
+		NodeSaleDg: repository,
+		BtcClient: datasource,
+		Network: conf.Network,
+		cleanupFuncs: cleanupFuncs,
+		lastBlockDefault: conf.Modules.NodeSale.LastBlockDefault,
+	}
+
+	httpServer := do.MustInvoke[*fiber.App](injector)
+	nodeSaleHandler := httphandler.New(repository)
+	if err := nodeSaleHandler.Mount(httpServer); err !=
nil { + return nil, fmt.Errorf("Can't mount nodesale API : %w", err) + } + logger.InfoContext(ctx, "Mounted nodesale HTTP handler") + + indexer := indexer.New(processor, datasource) + logger.InfoContext(ctx, "NodeSale module started.") + return indexer, nil +} diff --git a/modules/nodesale/nodesale_test.go b/modules/nodesale/nodesale_test.go new file mode 100644 index 0000000..eca8c4b --- /dev/null +++ b/modules/nodesale/nodesale_test.go @@ -0,0 +1,61 @@ +package nodesale + +import ( + "time" + + "github.com/btcsuite/btcd/chaincfg/chainhash" + "github.com/btcsuite/btcd/txscript" + "github.com/decred/dcrd/dcrec/secp256k1/v4" + "github.com/gaze-network/indexer-network/core/types" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" +) + +var ( + testBlockHeight uint64 = 101 + testTxIndex uint32 = 1 +) + +func assembleTestEvent(privateKey *secp256k1.PrivateKey, blockHashHex, txHashHex string, blockHeight uint64, txIndex uint32, message *protobuf.NodeSaleEvent) (NodeSaleEvent, *types.Block) { + blockHash, _ := chainhash.NewHashFromStr(blockHashHex) + txHash, _ := chainhash.NewHashFromStr(txHashHex) + + rawData, _ := proto.Marshal(message) + + builder := txscript.NewScriptBuilder() + builder.AddOp(txscript.OP_FALSE) + builder.AddOp(txscript.OP_IF) + builder.AddData(rawData) + builder.AddOp(txscript.OP_ENDIF) + + messageJson, _ := protojson.Marshal(message) + + if blockHeight == 0 { + blockHeight = testBlockHeight + testBlockHeight++ + } + if txIndex == 0 { + txIndex = testTxIndex + testTxIndex++ + } + + event := NodeSaleEvent{ + Transaction: &types.Transaction{ + BlockHeight: int64(blockHeight), + BlockHash: *blockHash, + Index: uint32(txIndex), + TxHash: *txHash, + }, + RawData: rawData, + EventMessage: message, + EventJson: messageJson, + TxPubkey: privateKey.PubKey(), + } + block := &types.Block{ + Header: types.BlockHeader{ + Timestamp: time.Now().UTC(), + }, + } + return event, block +} diff --git a/modules/nodesale/processor.go b/modules/nodesale/processor.go new file mode 100644 index 0000000..2db48d9 --- /dev/null +++ b/modules/nodesale/processor.go @@ -0,0 +1,303 @@ +package nodesale + +import ( + "bytes" + "context" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/btcsuite/btcd/chaincfg/chainhash" + "github.com/btcsuite/btcd/txscript" + "github.com/gaze-network/indexer-network/common" + "github.com/gaze-network/indexer-network/core/indexer" + "github.com/gaze-network/indexer-network/core/types" + "github.com/gaze-network/indexer-network/pkg/logger" + "github.com/gaze-network/indexer-network/pkg/logger/slogx" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/core/datasources" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" +) + +type NodeSaleEvent struct { + Transaction *types.Transaction + EventMessage *protobuf.NodeSaleEvent + EventJson []byte + TxPubkey *btcec.PublicKey + RawData []byte + InputValue uint64 +} + +func NewProcessor(repository datagateway.NodeSaleDataGateway, + datasource *datasources.BitcoinNodeDatasource, + network common.Network, + cleanupFuncs []func(context.Context) error, + lastBlockDefault int64, +) *Processor { + return &Processor{ + NodeSaleDg: repository, + BtcClient: 
datasource, + Network: network, + cleanupFuncs: cleanupFuncs, + lastBlockDefault: lastBlockDefault, + } +} + +func (p *Processor) Shutdown(ctx context.Context) error { + for _, cleanupFunc := range p.cleanupFuncs { + err := cleanupFunc(ctx) + if err != nil { + return errors.Wrap(err, "cleanup function error") + } + } + return nil +} + +type Processor struct { + NodeSaleDg datagateway.NodeSaleDataGateway + BtcClient *datasources.BitcoinNodeDatasource + Network common.Network + cleanupFuncs []func(context.Context) error + lastBlockDefault int64 +} + +// CurrentBlock implements indexer.Processor. +func (p *Processor) CurrentBlock(ctx context.Context) (types.BlockHeader, error) { + block, err := p.NodeSaleDg.GetLastProcessedBlock(ctx) + if err != nil { + logger.InfoContext(ctx, "Couldn't get last processed block. Start from NODESALE_LAST_BLOCK_DEFAULT.", + slogx.Int64("currentBlock", p.lastBlockDefault)) + header, err := p.BtcClient.GetBlockHeader(ctx, p.lastBlockDefault) + if err != nil { + return types.BlockHeader{}, errors.Wrap(err, "Cannot get default block from bitcoin node") + } + return types.BlockHeader{ + Hash: header.Hash, + Height: p.lastBlockDefault, + }, nil + } + + hash, err := chainhash.NewHashFromStr(block.BlockHash) + if err != nil { + logger.PanicContext(ctx, "Invalid hash format found in Database.") + } + return types.BlockHeader{ + Hash: *hash, + Height: block.BlockHeight, + }, nil +} + +// GetIndexedBlock implements indexer.Processor. +func (p *Processor) GetIndexedBlock(ctx context.Context, height int64) (types.BlockHeader, error) { + block, err := p.NodeSaleDg.GetBlock(ctx, height) + if err != nil { + return types.BlockHeader{}, errors.Wrapf(err, "Block %d not found", height) + } + hash, err := chainhash.NewHashFromStr(block.BlockHash) + if err != nil { + logger.PanicContext(ctx, "Invalid hash format found in Database.") + } + return types.BlockHeader{ + Hash: *hash, + Height: block.BlockHeight, + }, nil +} + +// Name implements indexer.Processor. +func (p *Processor) Name() string { + return "nodesale" +} + +func extractNodeSaleData(witness [][]byte) (data []byte, internalPubkey *btcec.PublicKey, isNodeSale bool) { + tokenizer, controlBlock, isTapScript := extractTapScript(witness) + if !isTapScript { + return []byte{}, nil, false + } + state := 0 + for tokenizer.Next() { + switch state { + case 0: + if tokenizer.Opcode() == txscript.OP_0 { + state++ + } else { + state = 0 + } + case 1: + if tokenizer.Opcode() == txscript.OP_IF { + state++ + } else { + state = 0 + } + case 2: + if tokenizer.Opcode() == txscript.OP_DATA_4 && + bytes.Equal(tokenizer.Data(), NODESALE_MAGIC) { + state++ + } else { + state = 0 + } + case 3: + // Any instruction > txscript.OP_16 is not push data. 
Note: txscript.OP_PUSHDATAX < txscript.OP_16
+			if tokenizer.Opcode() <= txscript.OP_16 {
+				data := tokenizer.Data()
+				return data, controlBlock.InternalKey, true
+			}
+			state = 0
+		}
+	}
+	return []byte{}, nil, false
+}
+
+func (p *Processor) parseTransactions(ctx context.Context, transactions []*types.Transaction) ([]NodeSaleEvent, error) {
+	var events []NodeSaleEvent
+	for _, t := range transactions {
+		for _, txIn := range t.TxIn {
+			data, txPubkey, isNodeSale := extractNodeSaleData(txIn.Witness)
+			if !isNodeSale {
+				continue
+			}
+
+			event := &protobuf.NodeSaleEvent{}
+			err := proto.Unmarshal(data, event)
+			if err != nil {
+				logger.WarnContext(ctx, "Invalid Protobuf",
+					slogx.String("block_hash", t.BlockHash.String()),
+					slogx.Int("txIndex", int(t.Index)))
+				continue
+			}
+			eventJson, err := protojson.Marshal(event)
+			if err != nil {
+				return []NodeSaleEvent{}, errors.Wrap(err, "Failed to parse protobuf to json")
+			}
+
+			prevTx, _, err := p.BtcClient.GetRawTransactionAndHeightByTxHash(ctx, txIn.PreviousOutTxHash)
+			if err != nil {
+				return nil, errors.Wrap(err, "Failed to get previous transaction data")
+			}
+
+			if txIn.PreviousOutIndex >= uint32(len(prevTx.TxOut)) {
+				return nil, errors.New("Invalid previous transaction from bitcoin")
+			}
+
+			events = append(events, NodeSaleEvent{
+				Transaction: t,
+				EventMessage: event,
+				EventJson: eventJson,
+				RawData: data,
+				TxPubkey: txPubkey,
+				InputValue: uint64(prevTx.TxOut[txIn.PreviousOutIndex].Value),
+			})
+		}
+	}
+	return events, nil
+}
+
+// Process implements indexer.Processor.
+func (p *Processor) Process(ctx context.Context, inputs []*types.Block) error {
+	for _, block := range inputs {
+		logger.InfoContext(ctx, "NodeSale processing a block",
+			slogx.Int64("block", block.Header.Height),
+			slogx.Stringer("hash", block.Header.Hash))
+		// parse all events from each transaction, including reading the tx wallet
+		events, err := p.parseTransactions(ctx, block.Transactions)
+		if err != nil {
+			return errors.Wrap(err, "Invalid data from bitcoin client")
+		}
+		// open transaction
+		qtx, err := p.NodeSaleDg.BeginNodeSaleTx(ctx)
+		if err != nil {
+			return errors.Wrap(err, "Failed to create transaction")
+		}
+		defer func() {
+			err = qtx.Rollback(ctx)
+			if err != nil {
+				logger.PanicContext(ctx, "Failed to rollback db")
+			}
+		}()
+
+		// write block
+		err = qtx.CreateBlock(ctx, entity.Block{
+			BlockHeight: block.Header.Height,
+			BlockHash: block.Header.Hash.String(),
+			Module: p.Name(),
+		})
+		if err != nil {
+			return errors.Wrapf(err, "Failed to add block %d", block.Header.Height)
+		}
+		// for each event
+		for _, event := range events {
+			logger.InfoContext(ctx, "NodeSale processing event",
+				slogx.Uint32("txIndex", event.Transaction.Index),
+				slogx.Int64("blockHeight", block.Header.Height),
+				slogx.Stringer("blockhash", block.Header.Hash),
+			)
+			eventMessage := event.EventMessage
+			switch eventMessage.Action {
+			case protobuf.Action_ACTION_DEPLOY:
+				err = p.ProcessDeploy(ctx, qtx, block, event)
+				if err != nil {
+					return errors.Wrapf(err, "Failed to deploy at block %d", block.Header.Height)
+				}
+			case protobuf.Action_ACTION_DELEGATE:
+				err = p.ProcessDelegate(ctx, qtx, block, event)
+				if err != nil {
+					return errors.Wrapf(err, "Failed to delegate at block %d", block.Header.Height)
+				}
+			case protobuf.Action_ACTION_PURCHASE:
+				err = p.ProcessPurchase(ctx, qtx, block, event)
+				if err != nil {
+					return errors.Wrapf(err, "Failed to purchase at block %d", block.Header.Height)
+				}
+			default:
+				logger.DebugContext(ctx, "Invalid event ACTION",
slogx.Stringer("txHash", (event.Transaction.TxHash))) + } + } + // close transaction + err = qtx.Commit(ctx) + if err != nil { + return errors.Wrap(err, "Failed to commit transaction") + } + logger.InfoContext(ctx, "NodeSale finished processing block", + slogx.Int64("block", block.Header.Height), + slogx.Stringer("hash", block.Header.Hash)) + } + return nil +} + +// RevertData implements indexer.Processor. +func (p *Processor) RevertData(ctx context.Context, from int64) error { + qtx, err := p.NodeSaleDg.BeginNodeSaleTx(ctx) + if err != nil { + return errors.Wrap(err, "Failed to create transaction") + } + defer func() { err = qtx.Rollback(ctx) }() + _, err = qtx.RemoveBlockFrom(ctx, from) + if err != nil { + return errors.Wrap(err, "Failed to remove blocks.") + } + + affected, err := qtx.RemoveEventsFromBlock(ctx, from) + if err != nil { + return errors.Wrap(err, "Failed to remove events.") + } + _, err = qtx.ClearDelegate(ctx) + if err != nil { + return errors.Wrap(err, "Failed to clear delegate from nodes") + } + err = qtx.Commit(ctx) + if err != nil { + return errors.Wrap(err, "Failed to commit transaction") + } + logger.InfoContext(ctx, "Events removed", + slogx.Int64("Total removed", affected)) + return nil +} + +// VerifyStates implements indexer.Processor. +func (p *Processor) VerifyStates(ctx context.Context) error { + panic("unimplemented") +} + +var _ indexer.Processor[*types.Block] = (*Processor)(nil) diff --git a/modules/nodesale/protobuf/nodesale.pb.go b/modules/nodesale/protobuf/nodesale.pb.go new file mode 100644 index 0000000..0ac84bd --- /dev/null +++ b/modules/nodesale/protobuf/nodesale.pb.go @@ -0,0 +1,806 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.34.1 +// protoc v5.26.1 +// source: modules/nodesale/protobuf/nodesale.proto + +// protoc modules/nodesale/protobuf/nodesale.proto --go_out=. --go_opt=module=github.com/gaze-network/indexer-network + +package protobuf + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Action int32 + +const ( + Action_ACTION_DEPLOY Action = 0 + Action_ACTION_PURCHASE Action = 1 + Action_ACTION_DELEGATE Action = 2 +) + +// Enum value maps for Action. +var ( + Action_name = map[int32]string{ + 0: "ACTION_DEPLOY", + 1: "ACTION_PURCHASE", + 2: "ACTION_DELEGATE", + } + Action_value = map[string]int32{ + "ACTION_DEPLOY": 0, + "ACTION_PURCHASE": 1, + "ACTION_DELEGATE": 2, + } +) + +func (x Action) Enum() *Action { + p := new(Action) + *p = x + return p +} + +func (x Action) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Action) Descriptor() protoreflect.EnumDescriptor { + return file_modules_nodesale_protobuf_nodesale_proto_enumTypes[0].Descriptor() +} + +func (Action) Type() protoreflect.EnumType { + return &file_modules_nodesale_protobuf_nodesale_proto_enumTypes[0] +} + +func (x Action) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Action.Descriptor instead. 
+func (Action) EnumDescriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{0} +} + +type NodeSaleEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Action Action `protobuf:"varint,1,opt,name=action,proto3,enum=nodesale.Action" json:"action,omitempty"` + Deploy *ActionDeploy `protobuf:"bytes,2,opt,name=deploy,proto3,oneof" json:"deploy,omitempty"` + Purchase *ActionPurchase `protobuf:"bytes,3,opt,name=purchase,proto3,oneof" json:"purchase,omitempty"` + Delegate *ActionDelegate `protobuf:"bytes,4,opt,name=delegate,proto3,oneof" json:"delegate,omitempty"` +} + +func (x *NodeSaleEvent) Reset() { + *x = NodeSaleEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *NodeSaleEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*NodeSaleEvent) ProtoMessage() {} + +func (x *NodeSaleEvent) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use NodeSaleEvent.ProtoReflect.Descriptor instead. +func (*NodeSaleEvent) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{0} +} + +func (x *NodeSaleEvent) GetAction() Action { + if x != nil { + return x.Action + } + return Action_ACTION_DEPLOY +} + +func (x *NodeSaleEvent) GetDeploy() *ActionDeploy { + if x != nil { + return x.Deploy + } + return nil +} + +func (x *NodeSaleEvent) GetPurchase() *ActionPurchase { + if x != nil { + return x.Purchase + } + return nil +} + +func (x *NodeSaleEvent) GetDelegate() *ActionDelegate { + if x != nil { + return x.Delegate + } + return nil +} + +type ActionDeploy struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + StartsAt uint32 `protobuf:"varint,2,opt,name=startsAt,proto3" json:"startsAt,omitempty"` + EndsAt uint32 `protobuf:"varint,3,opt,name=endsAt,proto3" json:"endsAt,omitempty"` + Tiers []*Tier `protobuf:"bytes,4,rep,name=tiers,proto3" json:"tiers,omitempty"` + SellerPublicKey string `protobuf:"bytes,5,opt,name=sellerPublicKey,proto3" json:"sellerPublicKey,omitempty"` + MaxPerAddress uint32 `protobuf:"varint,6,opt,name=maxPerAddress,proto3" json:"maxPerAddress,omitempty"` + MaxDiscountPercentage uint32 `protobuf:"varint,7,opt,name=maxDiscountPercentage,proto3" json:"maxDiscountPercentage,omitempty"` + SellerWallet string `protobuf:"bytes,8,opt,name=sellerWallet,proto3" json:"sellerWallet,omitempty"` +} + +func (x *ActionDeploy) Reset() { + *x = ActionDeploy{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ActionDeploy) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ActionDeploy) ProtoMessage() {} + +func (x *ActionDeploy) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ActionDeploy.ProtoReflect.Descriptor instead. +func (*ActionDeploy) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{1} +} + +func (x *ActionDeploy) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ActionDeploy) GetStartsAt() uint32 { + if x != nil { + return x.StartsAt + } + return 0 +} + +func (x *ActionDeploy) GetEndsAt() uint32 { + if x != nil { + return x.EndsAt + } + return 0 +} + +func (x *ActionDeploy) GetTiers() []*Tier { + if x != nil { + return x.Tiers + } + return nil +} + +func (x *ActionDeploy) GetSellerPublicKey() string { + if x != nil { + return x.SellerPublicKey + } + return "" +} + +func (x *ActionDeploy) GetMaxPerAddress() uint32 { + if x != nil { + return x.MaxPerAddress + } + return 0 +} + +func (x *ActionDeploy) GetMaxDiscountPercentage() uint32 { + if x != nil { + return x.MaxDiscountPercentage + } + return 0 +} + +func (x *ActionDeploy) GetSellerWallet() string { + if x != nil { + return x.SellerWallet + } + return "" +} + +type Tier struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + PriceSat uint32 `protobuf:"varint,1,opt,name=priceSat,proto3" json:"priceSat,omitempty"` + Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"` + MaxPerAddress uint32 `protobuf:"varint,3,opt,name=maxPerAddress,proto3" json:"maxPerAddress,omitempty"` +} + +func (x *Tier) Reset() { + *x = Tier{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Tier) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Tier) ProtoMessage() {} + +func (x *Tier) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Tier.ProtoReflect.Descriptor instead. 
+func (*Tier) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{2} +} + +func (x *Tier) GetPriceSat() uint32 { + if x != nil { + return x.PriceSat + } + return 0 +} + +func (x *Tier) GetLimit() uint32 { + if x != nil { + return x.Limit + } + return 0 +} + +func (x *Tier) GetMaxPerAddress() uint32 { + if x != nil { + return x.MaxPerAddress + } + return 0 +} + +type ActionPurchase struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Payload *PurchasePayload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + SellerSignature string `protobuf:"bytes,2,opt,name=sellerSignature,proto3" json:"sellerSignature,omitempty"` +} + +func (x *ActionPurchase) Reset() { + *x = ActionPurchase{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ActionPurchase) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ActionPurchase) ProtoMessage() {} + +func (x *ActionPurchase) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ActionPurchase.ProtoReflect.Descriptor instead. +func (*ActionPurchase) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{3} +} + +func (x *ActionPurchase) GetPayload() *PurchasePayload { + if x != nil { + return x.Payload + } + return nil +} + +func (x *ActionPurchase) GetSellerSignature() string { + if x != nil { + return x.SellerSignature + } + return "" +} + +type PurchasePayload struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + DeployID *ActionID `protobuf:"bytes,1,opt,name=deployID,proto3" json:"deployID,omitempty"` + BuyerPublicKey string `protobuf:"bytes,2,opt,name=buyerPublicKey,proto3" json:"buyerPublicKey,omitempty"` + NodeIDs []uint32 `protobuf:"varint,3,rep,packed,name=nodeIDs,proto3" json:"nodeIDs,omitempty"` + TotalAmountSat int64 `protobuf:"varint,4,opt,name=totalAmountSat,proto3" json:"totalAmountSat,omitempty"` + TimeOutBlock uint64 `protobuf:"varint,5,opt,name=timeOutBlock,proto3" json:"timeOutBlock,omitempty"` +} + +func (x *PurchasePayload) Reset() { + *x = PurchasePayload{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PurchasePayload) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PurchasePayload) ProtoMessage() {} + +func (x *PurchasePayload) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PurchasePayload.ProtoReflect.Descriptor instead. 
+func (*PurchasePayload) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{4} +} + +func (x *PurchasePayload) GetDeployID() *ActionID { + if x != nil { + return x.DeployID + } + return nil +} + +func (x *PurchasePayload) GetBuyerPublicKey() string { + if x != nil { + return x.BuyerPublicKey + } + return "" +} + +func (x *PurchasePayload) GetNodeIDs() []uint32 { + if x != nil { + return x.NodeIDs + } + return nil +} + +func (x *PurchasePayload) GetTotalAmountSat() int64 { + if x != nil { + return x.TotalAmountSat + } + return 0 +} + +func (x *PurchasePayload) GetTimeOutBlock() uint64 { + if x != nil { + return x.TimeOutBlock + } + return 0 +} + +type ActionID struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Block uint64 `protobuf:"varint,1,opt,name=block,proto3" json:"block,omitempty"` + TxIndex uint32 `protobuf:"varint,2,opt,name=txIndex,proto3" json:"txIndex,omitempty"` +} + +func (x *ActionID) Reset() { + *x = ActionID{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ActionID) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ActionID) ProtoMessage() {} + +func (x *ActionID) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ActionID.ProtoReflect.Descriptor instead. +func (*ActionID) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{5} +} + +func (x *ActionID) GetBlock() uint64 { + if x != nil { + return x.Block + } + return 0 +} + +func (x *ActionID) GetTxIndex() uint32 { + if x != nil { + return x.TxIndex + } + return 0 +} + +type ActionDelegate struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + DelegateePublicKey string `protobuf:"bytes,1,opt,name=delegateePublicKey,proto3" json:"delegateePublicKey,omitempty"` + NodeIDs []uint32 `protobuf:"varint,2,rep,packed,name=nodeIDs,proto3" json:"nodeIDs,omitempty"` + DeployID *ActionID `protobuf:"bytes,3,opt,name=deployID,proto3" json:"deployID,omitempty"` +} + +func (x *ActionDelegate) Reset() { + *x = ActionDelegate{} + if protoimpl.UnsafeEnabled { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ActionDelegate) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ActionDelegate) ProtoMessage() {} + +func (x *ActionDelegate) ProtoReflect() protoreflect.Message { + mi := &file_modules_nodesale_protobuf_nodesale_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ActionDelegate.ProtoReflect.Descriptor instead. 
+func (*ActionDelegate) Descriptor() ([]byte, []int) { + return file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP(), []int{6} +} + +func (x *ActionDelegate) GetDelegateePublicKey() string { + if x != nil { + return x.DelegateePublicKey + } + return "" +} + +func (x *ActionDelegate) GetNodeIDs() []uint32 { + if x != nil { + return x.NodeIDs + } + return nil +} + +func (x *ActionDelegate) GetDeployID() *ActionID { + if x != nil { + return x.DeployID + } + return nil +} + +var File_modules_nodesale_protobuf_nodesale_proto protoreflect.FileDescriptor + +var file_modules_nodesale_protobuf_nodesale_proto_rawDesc = []byte{ + 0x0a, 0x28, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x2f, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, + 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x6e, 0x6f, 0x64, 0x65, + 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x08, 0x6e, 0x6f, 0x64, 0x65, + 0x73, 0x61, 0x6c, 0x65, 0x22, 0x89, 0x02, 0x0a, 0x0d, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x61, 0x6c, + 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x10, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, + 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x33, 0x0a, 0x06, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x48, 0x00, 0x52, 0x06, 0x64, 0x65, 0x70, 0x6c, + 0x6f, 0x79, 0x88, 0x01, 0x01, 0x12, 0x39, 0x0a, 0x08, 0x70, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, + 0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, + 0x65, 0x48, 0x01, 0x52, 0x08, 0x70, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, 0x88, 0x01, 0x01, + 0x12, 0x39, 0x0a, 0x08, 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x41, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x48, 0x02, 0x52, 0x08, + 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, + 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x75, 0x72, 0x63, 0x68, + 0x61, 0x73, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, + 0x22, 0xa6, 0x02, 0x0a, 0x0c, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x70, 0x6c, 0x6f, + 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x41, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x41, + 0x74, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x6e, 0x64, 0x73, 0x41, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x73, 0x41, 0x74, 0x12, 0x24, 0x0a, 0x05, 0x74, 0x69, 0x65, + 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, + 0x61, 0x6c, 0x65, 0x2e, 0x54, 0x69, 0x65, 0x72, 0x52, 0x05, 0x74, 0x69, 0x65, 0x72, 0x73, 0x12, + 0x28, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, + 0x65, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x65, 0x6c, 
0x6c, 0x65, 0x72, + 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x24, 0x0a, 0x0d, 0x6d, 0x61, 0x78, + 0x50, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x0d, 0x6d, 0x61, 0x78, 0x50, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, + 0x34, 0x0a, 0x15, 0x6d, 0x61, 0x78, 0x44, 0x69, 0x73, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x65, + 0x72, 0x63, 0x65, 0x6e, 0x74, 0x61, 0x67, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x15, + 0x6d, 0x61, 0x78, 0x44, 0x69, 0x73, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x50, 0x65, 0x72, 0x63, 0x65, + 0x6e, 0x74, 0x61, 0x67, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72, 0x57, + 0x61, 0x6c, 0x6c, 0x65, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x65, 0x6c, + 0x6c, 0x65, 0x72, 0x57, 0x61, 0x6c, 0x6c, 0x65, 0x74, 0x22, 0x5e, 0x0a, 0x04, 0x54, 0x69, 0x65, + 0x72, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x63, 0x65, 0x53, 0x61, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x08, 0x70, 0x72, 0x69, 0x63, 0x65, 0x53, 0x61, 0x74, 0x12, 0x14, 0x0a, + 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x6c, 0x69, + 0x6d, 0x69, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x6d, 0x61, 0x78, 0x50, 0x65, 0x72, 0x41, 0x64, 0x64, + 0x72, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x6d, 0x61, 0x78, 0x50, + 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x22, 0x6f, 0x0a, 0x0e, 0x41, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x50, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x07, 0x70, + 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6e, + 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x50, 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, + 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, + 0x12, 0x28, 0x0a, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65, 0x72, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x65, 0x6c, 0x6c, 0x65, + 0x72, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0xcf, 0x01, 0x0a, 0x0f, 0x50, + 0x75, 0x72, 0x63, 0x68, 0x61, 0x73, 0x65, 0x50, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x2e, + 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x49, 0x44, 0x52, 0x08, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x49, 0x44, 0x12, 0x26, + 0x0a, 0x0e, 0x62, 0x75, 0x79, 0x65, 0x72, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x62, 0x75, 0x79, 0x65, 0x72, 0x50, 0x75, 0x62, + 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0d, 0x52, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44, 0x73, + 0x12, 0x26, 0x0a, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x53, + 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41, + 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x53, 0x61, 0x74, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, + 0x4f, 0x75, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, + 0x74, 0x69, 0x6d, 0x65, 0x4f, 0x75, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x3a, 0x0a, 0x08, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x18, + 0x0a, 0x07, 0x74, 0x78, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, + 0x07, 0x74, 0x78, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x8a, 0x01, 0x0a, 0x0e, 0x41, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x12, 0x2e, 0x0a, 0x12, 0x64, + 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, 0x65, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x64, 0x65, 0x6c, 0x65, 0x67, 0x61, 0x74, + 0x65, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x6e, + 0x6f, 0x64, 0x65, 0x49, 0x44, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0d, 0x52, 0x07, 0x6e, 0x6f, + 0x64, 0x65, 0x49, 0x44, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x49, + 0x44, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, + 0x6c, 0x65, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x44, 0x52, 0x08, 0x64, 0x65, 0x70, + 0x6c, 0x6f, 0x79, 0x49, 0x44, 0x2a, 0x45, 0x0a, 0x06, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x11, 0x0a, 0x0d, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x44, 0x45, 0x50, 0x4c, 0x4f, 0x59, + 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x41, 0x43, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x50, 0x55, 0x52, + 0x43, 0x48, 0x41, 0x53, 0x45, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x41, 0x43, 0x54, 0x49, 0x4f, + 0x4e, 0x5f, 0x44, 0x45, 0x4c, 0x45, 0x47, 0x41, 0x54, 0x45, 0x10, 0x02, 0x42, 0x43, 0x5a, 0x41, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x61, 0x7a, 0x65, 0x2d, + 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x2f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x72, 0x2d, + 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x2f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x2f, + 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x61, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_modules_nodesale_protobuf_nodesale_proto_rawDescOnce sync.Once + file_modules_nodesale_protobuf_nodesale_proto_rawDescData = file_modules_nodesale_protobuf_nodesale_proto_rawDesc +) + +func file_modules_nodesale_protobuf_nodesale_proto_rawDescGZIP() []byte { + file_modules_nodesale_protobuf_nodesale_proto_rawDescOnce.Do(func() { + file_modules_nodesale_protobuf_nodesale_proto_rawDescData = protoimpl.X.CompressGZIP(file_modules_nodesale_protobuf_nodesale_proto_rawDescData) + }) + return file_modules_nodesale_protobuf_nodesale_proto_rawDescData +} + +var file_modules_nodesale_protobuf_nodesale_proto_enumTypes = make([]protoimpl.EnumInfo, 1) +var file_modules_nodesale_protobuf_nodesale_proto_msgTypes = make([]protoimpl.MessageInfo, 7) +var file_modules_nodesale_protobuf_nodesale_proto_goTypes = []interface{}{ + (Action)(0), // 0: nodesale.Action + (*NodeSaleEvent)(nil), // 1: nodesale.NodeSaleEvent + (*ActionDeploy)(nil), // 2: nodesale.ActionDeploy + (*Tier)(nil), // 3: nodesale.Tier + (*ActionPurchase)(nil), // 4: nodesale.ActionPurchase + (*PurchasePayload)(nil), // 5: nodesale.PurchasePayload + (*ActionID)(nil), // 6: nodesale.ActionID + (*ActionDelegate)(nil), // 7: nodesale.ActionDelegate +} +var file_modules_nodesale_protobuf_nodesale_proto_depIdxs = []int32{ + 0, // 0: nodesale.NodeSaleEvent.action:type_name -> nodesale.Action + 2, // 1: nodesale.NodeSaleEvent.deploy:type_name -> nodesale.ActionDeploy + 4, // 2: nodesale.NodeSaleEvent.purchase:type_name -> nodesale.ActionPurchase + 7, // 3: 
nodesale.NodeSaleEvent.delegate:type_name -> nodesale.ActionDelegate + 3, // 4: nodesale.ActionDeploy.tiers:type_name -> nodesale.Tier + 5, // 5: nodesale.ActionPurchase.payload:type_name -> nodesale.PurchasePayload + 6, // 6: nodesale.PurchasePayload.deployID:type_name -> nodesale.ActionID + 6, // 7: nodesale.ActionDelegate.deployID:type_name -> nodesale.ActionID + 8, // [8:8] is the sub-list for method output_type + 8, // [8:8] is the sub-list for method input_type + 8, // [8:8] is the sub-list for extension type_name + 8, // [8:8] is the sub-list for extension extendee + 0, // [0:8] is the sub-list for field type_name +} + +func init() { file_modules_nodesale_protobuf_nodesale_proto_init() } +func file_modules_nodesale_protobuf_nodesale_proto_init() { + if File_modules_nodesale_protobuf_nodesale_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*NodeSaleEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ActionDeploy); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Tier); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ActionPurchase); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PurchasePayload); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ActionID); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ActionDelegate); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_modules_nodesale_protobuf_nodesale_proto_msgTypes[0].OneofWrappers = []interface{}{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_modules_nodesale_protobuf_nodesale_proto_rawDesc, + NumEnums: 1, + NumMessages: 7, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_modules_nodesale_protobuf_nodesale_proto_goTypes, + DependencyIndexes: file_modules_nodesale_protobuf_nodesale_proto_depIdxs, + EnumInfos: file_modules_nodesale_protobuf_nodesale_proto_enumTypes, + MessageInfos: file_modules_nodesale_protobuf_nodesale_proto_msgTypes, + }.Build() + File_modules_nodesale_protobuf_nodesale_proto = out.File + 
file_modules_nodesale_protobuf_nodesale_proto_rawDesc = nil + file_modules_nodesale_protobuf_nodesale_proto_goTypes = nil + file_modules_nodesale_protobuf_nodesale_proto_depIdxs = nil +} diff --git a/modules/nodesale/protobuf/nodesale.proto b/modules/nodesale/protobuf/nodesale.proto new file mode 100644 index 0000000..7c32d7b --- /dev/null +++ b/modules/nodesale/protobuf/nodesale.proto @@ -0,0 +1,60 @@ +syntax = "proto3"; + +// protoc modules/nodesale/protobuf/nodesale.proto --go_out=. --go_opt=module=github.com/gaze-network/indexer-network + +package nodesale; +option go_package = "github.com/gaze-network/indexer-network/modules/nodesale/protobuf"; + +enum Action { + ACTION_DEPLOY = 0; + ACTION_PURCHASE = 1; + ACTION_DELEGATE = 2; +} + +message NodeSaleEvent { + Action action = 1; + optional ActionDeploy deploy = 2; + optional ActionPurchase purchase = 3; + optional ActionDelegate delegate = 4; +} + +message ActionDeploy { + string name = 1; + uint32 startsAt = 2; + uint32 endsAt = 3; + repeated Tier tiers = 4; + string sellerPublicKey = 5; + uint32 maxPerAddress = 6; + uint32 maxDiscountPercentage = 7; + string sellerWallet = 8; +} + +message Tier { + uint32 priceSat = 1; + uint32 limit = 2; + uint32 maxPerAddress = 3; +} + +message ActionPurchase { + PurchasePayload payload = 1; + string sellerSignature = 2; +} + +message PurchasePayload { + ActionID deployID = 1; + string buyerPublicKey = 2; + repeated uint32 nodeIDs = 3; + int64 totalAmountSat = 4; + uint64 timeOutBlock = 5; +} + +message ActionID { + uint64 block = 1; + uint32 txIndex = 2; +} + +message ActionDelegate { + string delegateePublicKey = 1; + repeated uint32 nodeIDs = 2; + ActionID deployID = 3; +} \ No newline at end of file diff --git a/modules/nodesale/pubkeyaddr.go b/modules/nodesale/pubkeyaddr.go new file mode 100644 index 0000000..610530b --- /dev/null +++ b/modules/nodesale/pubkeyaddr.go @@ -0,0 +1,12 @@ +package nodesale + +import ( + "github.com/btcsuite/btcd/btcec/v2" + "github.com/btcsuite/btcd/btcutil" +) + +func (p *Processor) PubkeyToPkHashAddress(pubKey *btcec.PublicKey) btcutil.Address { + addrPubKey, _ := btcutil.NewAddressPubKey(pubKey.SerializeCompressed(), p.Network.ChainParams()) + addrPubKeyHash := addrPubKey.AddressPubKeyHash() + return addrPubKeyHash +} diff --git a/modules/nodesale/purchase.go b/modules/nodesale/purchase.go new file mode 100644 index 0000000..828e9b1 --- /dev/null +++ b/modules/nodesale/purchase.go @@ -0,0 +1,87 @@ +package nodesale + +import ( + "context" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/core/types" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + purchasevalidator "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator/purchase" +) + +func (p *Processor) ProcessPurchase(ctx context.Context, qtx datagateway.NodeSaleDataGatewayWithTx, block *types.Block, event NodeSaleEvent) error { + purchase := event.EventMessage.Purchase + payload := purchase.Payload + + validator := purchasevalidator.New() + + validator.EqualXonlyPublicKey(payload.BuyerPublicKey, event.TxPubkey) + + _, deploy, err := validator.NodeSaleExists(ctx, qtx, payload) + if err != nil { + return errors.Wrap(err, "cannot query. 
Something wrong.") + } + + validator.ValidTimestamp(deploy, block.Header.Timestamp) + validator.WithinTimeoutBlock(payload.TimeOutBlock, uint64(event.Transaction.BlockHeight)) + + validator.VerifySignature(purchase, deploy) + + _, tierMap := validator.ValidTiers(payload, deploy) + + tiers := tierMap.Tiers + buyingTiersCount := tierMap.BuyingTiersCount + nodeIdToTier := tierMap.NodeIdToTier + + _, err = validator.ValidUnpurchasedNodes(ctx, qtx, payload) + if err != nil { + return errors.Wrap(err, "cannot query. Something wrong.") + } + + _, meta := validator.ValidPaidAmount(payload, deploy, event.InputValue, tiers, buyingTiersCount, p.Network.ChainParams()) + + _, err = validator.WithinLimit(ctx, qtx, payload, deploy, tiers, buyingTiersCount) + if err != nil { + return errors.Wrap(err, "cannot query. Something wrong.") + } + + err = qtx.CreateEvent(ctx, entity.NodeSaleEvent{ + TxHash: event.Transaction.TxHash.String(), + TxIndex: int32(event.Transaction.Index), + Action: int32(event.EventMessage.Action), + RawMessage: event.RawData, + ParsedMessage: event.EventJson, + BlockTimestamp: block.Header.Timestamp, + BlockHash: event.Transaction.BlockHash.String(), + BlockHeight: event.Transaction.BlockHeight, + Valid: validator.Valid, + WalletAddress: p.PubkeyToPkHashAddress(event.TxPubkey).EncodeAddress(), + Metadata: meta, + Reason: validator.Reason, + }) + if err != nil { + return errors.Wrap(err, "Failed to insert event") + } + + if validator.Valid { + // add to node + for _, nodeId := range payload.NodeIDs { + err := qtx.CreateNode(ctx, entity.Node{ + SaleBlock: deploy.BlockHeight, + SaleTxIndex: deploy.TxIndex, + NodeID: nodeId, + TierIndex: nodeIdToTier[nodeId], + DelegatedTo: "", + OwnerPublicKey: payload.BuyerPublicKey, + PurchaseTxHash: event.Transaction.TxHash.String(), + DelegateTxHash: "", + }) + if err != nil { + return errors.Wrap(err, "Failed to insert node") + } + } + } + + return nil +} diff --git a/modules/nodesale/purchase_test.go b/modules/nodesale/purchase_test.go new file mode 100644 index 0000000..f527d09 --- /dev/null +++ b/modules/nodesale/purchase_test.go @@ -0,0 +1,902 @@ +package nodesale + +import ( + "context" + "encoding/hex" + "testing" + "time" + + "github.com/btcsuite/btcd/btcec/v2" + "github.com/btcsuite/btcd/chaincfg/chainhash" + "github.com/decred/dcrd/dcrec/secp256k1/v4/ecdsa" + "github.com/gaze-network/indexer-network/common" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway/mocks" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/entity" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator" + "github.com/gaze-network/indexer-network/modules/nodesale/internal/validator/purchase" + "github.com/gaze-network/indexer-network/modules/nodesale/protobuf" + "github.com/samber/lo" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" +) + +func TestInvalidPurchase(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + buyerPrivateKey, err := btcec.NewPrivateKey() + require.NoError(t, err) + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: 
&protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 111, + TxIndex: 1, + }, + NodeIDs: []uint32{1, 2}, + BuyerPublicKey: buyerPubkeyHex, + TotalAmountSat: 500, + TimeOutBlock: uint64(testBlockHeight) + 5, + }, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "030303030303", "030303030303", 0, 0, message) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, mock.Anything).Return(nil, nil) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false + })).Return(nil) + + err = p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNode") +} + +func TestInvalidBuyerKey(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + strangerPrivateKey, _ := btcec.NewPrivateKey() + strangerPrivateKeyHex := hex.EncodeToString(strangerPrivateKey.PubKey().SerializeCompressed()) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + NodeIDs: []uint32{1, 2}, + BuyerPublicKey: strangerPrivateKeyHex, + TotalAmountSat: 200, + TimeOutBlock: uint64(testBlockHeight) + 5, + }, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "0707070707", "0707070707", 0, 0, message) + block.Header.Timestamp = time.Now().UTC() + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == validator.INVALID_PUBKEY + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNode") +} + +func TestInvalidTimestamp(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, err := btcec.NewPrivateKey() + require.NoError(t, err) + + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 5, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + NodeIDs: 
[]uint32{1, 2}, + BuyerPublicKey: buyerPubkeyHex, + TotalAmountSat: 200, + TimeOutBlock: uint64(testBlockHeight) + 5, + }, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "050505050505", "050505050505", 0, 0, message) + + block.Header.Timestamp = time.Now().UTC().Add(time.Hour * 2) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.PURCHASE_TIMEOUT + })).Return(nil) + + err = p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNode") +} + +func TestTimeOut(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 5, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + NodeIDs: []uint32{1, 2}, + BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: uint64(testBlockHeight) - 5, + TotalAmountSat: 200, + }, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "090909090909", "090909090909", 0, 0, message) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.BLOCK_HEIGHT_TIMEOUT + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNode") +} + +func TestSignatureInvalid(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 5, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson 
+ }) + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + payload := &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + NodeIDs: []uint32{1, 2}, + BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: testBlockHeight + 5, + } + + payloadBytes, _ := proto.Marshal(payload) + payloadHash := chainhash.DoubleHashB(payloadBytes) + signature := ecdsa.Sign(buyerPrivateKey, payloadHash[:]) + signatureHex := hex.EncodeToString(signature.Serialize()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: payload, + SellerSignature: signatureHex, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "0B0B0B", "0B0B0B", 0, 0, message) + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.INVALID_SIGNATURE + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNode") +} + +func TestValidPurchase(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 4, + MaxPerAddress: 2, + }, + { + PriceSat: 400, + Limit: 3, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil) + + mockDgTx.EXPECT().GetNodesByOwner(mock.Anything, mock.Anything).Return(nil, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + payload := &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: uint64(testBlockHeight) + 5, + NodeIDs: []uint32{0, 5, 6, 9}, + TotalAmountSat: 500, + } + + payloadBytes, _ := proto.Marshal(payload) + payloadHash := chainhash.DoubleHashB(payloadBytes) + signature := 
ecdsa.Sign(sellerPrivateKey, payloadHash[:]) + signatureHex := hex.EncodeToString(signature.Serialize()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: payload, + SellerSignature: signatureHex, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "0D0D0D0D", "0D0D0D0D", 0, 0, message) + event.InputValue = 500 + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == true && event.Reason == "" + })).Return(nil) + + mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool { + return node.NodeID == 0 && + node.TierIndex == 0 && + node.OwnerPublicKey == buyerPubkeyHex && + node.PurchaseTxHash == event.Transaction.TxHash.String() && + node.SaleBlock == 100 && + node.SaleTxIndex == 1 + })).Return(nil) + + mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool { + return node.NodeID == 5 && + node.TierIndex == 1 && + node.OwnerPublicKey == buyerPubkeyHex && + node.PurchaseTxHash == event.Transaction.TxHash.String() && + node.SaleBlock == 100 && + node.SaleTxIndex == 1 + })).Return(nil) + + mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool { + return node.NodeID == 6 && + node.TierIndex == 1 && + node.OwnerPublicKey == buyerPubkeyHex && + node.PurchaseTxHash == event.Transaction.TxHash.String() && + node.SaleBlock == 100 && + node.SaleTxIndex == 1 + })).Return(nil) + + mockDgTx.EXPECT().CreateNode(mock.Anything, mock.MatchedBy(func(node entity.Node) bool { + return node.NodeID == 9 && + node.TierIndex == 2 && + node.OwnerPublicKey == buyerPubkeyHex && + node.PurchaseTxHash == event.Transaction.TxHash.String() && + node.SaleBlock == 100 && + node.SaleTxIndex == 1 + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) +} + +func TestMismatchPayment(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 4, + MaxPerAddress: 2, + }, + { + PriceSat: 400, + Limit: 3, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + payload := &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + 
BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: uint64(testBlockHeight) + 5, + NodeIDs: []uint32{0, 5, 6, 9}, + TotalAmountSat: 500, + } + + payloadBytes, _ := proto.Marshal(payload) + payloadHash := chainhash.DoubleHashB(payloadBytes) + signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:]) + signatureHex := hex.EncodeToString(signature.Serialize()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: payload, + SellerSignature: signatureHex, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "0D0D0D0D", "0D0D0D0D", 0, 0, message) + event.InputValue = 400 + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.INVALID_PAYMENT + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) +} + +func TestInsufficientFund(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 4, + MaxPerAddress: 2, + }, + { + PriceSat: 400, + Limit: 3, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + payload := &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: uint64(testBlockHeight) + 5, + NodeIDs: []uint32{0, 5, 6, 9}, + TotalAmountSat: 200, + } + + payloadBytes, _ := proto.Marshal(payload) + payloadHash := chainhash.DoubleHashB(payloadBytes) + signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:]) + signatureHex := hex.EncodeToString(signature.Serialize()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: payload, + SellerSignature: signatureHex, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "0D0D0D0D", "0D0D0D0D", 0, 0, message) + event.InputValue = 200 + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.INSUFFICIENT_FUND + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) +} + +func TestBuyingLimit(t *testing.T) { + ctx := 
context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + Limit: 4, + MaxPerAddress: 2, + }, + { + PriceSat: 400, + Limit: 50, + MaxPerAddress: 100, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 2, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil) + + mockDgTx.EXPECT().GetNodesByOwner(mock.Anything, datagateway.GetNodesByOwnerParams{ + SaleBlock: 100, + SaleTxIndex: 1, + OwnerPublicKey: buyerPubkeyHex, + }).Return([]entity.Node{ + { + SaleBlock: 100, + SaleTxIndex: 1, + NodeID: 9, + TierIndex: 2, + OwnerPublicKey: buyerPubkeyHex, + }, + { + SaleBlock: 100, + SaleTxIndex: 1, + NodeID: 10, + TierIndex: 2, + OwnerPublicKey: buyerPubkeyHex, + }, + }, nil) + + payload := &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: uint64(testBlockHeight) + 5, + NodeIDs: []uint32{11}, + TotalAmountSat: 600, + } + + payloadBytes, _ := proto.Marshal(payload) + payloadHash := chainhash.DoubleHashB(payloadBytes) + signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:]) + signatureHex := hex.EncodeToString(signature.Serialize()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: payload, + SellerSignature: signatureHex, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "22222222", "22222222", 0, 0, message) + event.InputValue = 600 + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.OVER_LIMIT_PER_ADDR + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) + + mockDgTx.AssertNotCalled(t, "CreateNode") +} + +func TestBuyingTierLimit(t *testing.T) { + ctx := context.Background() + mockDgTx := mocks.NewNodeSaleDataGatewayWithTx(t) + p := NewProcessor(mockDgTx, nil, common.NetworkMainnet, nil, 0) + + sellerPrivateKey, _ := btcec.NewPrivateKey() + sellerPubkeyHex := hex.EncodeToString(sellerPrivateKey.PubKey().SerializeCompressed()) + sellerWallet := p.PubkeyToPkHashAddress(sellerPrivateKey.PubKey()) + + startAt := time.Now().Add(time.Hour * -1) + endAt := time.Now().Add(time.Hour * 1) + + tiers := lo.Map([]*protobuf.Tier{ + { + PriceSat: 100, + Limit: 5, + MaxPerAddress: 100, + }, + { + PriceSat: 200, + 
Limit: 4, + MaxPerAddress: 2, + }, + { + PriceSat: 400, + Limit: 50, + MaxPerAddress: 3, + }, + }, func(tier *protobuf.Tier, _ int) []byte { + tierJson, err := protojson.Marshal(tier) + require.NoError(t, err) + return tierJson + }) + + mockDgTx.EXPECT().GetNodeSale(mock.Anything, datagateway.GetNodeSaleParams{ + BlockHeight: 100, + TxIndex: 1, + }).Return([]entity.NodeSale{ + { + BlockHeight: 100, + TxIndex: 1, + Name: t.Name(), + StartsAt: startAt, + EndsAt: endAt, + Tiers: tiers, + SellerPublicKey: sellerPubkeyHex, + MaxPerAddress: 100, + DeployTxHash: "040404040404", + MaxDiscountPercentage: 50, + SellerWallet: sellerWallet.EncodeAddress(), + }, + }, nil) + + buyerPrivateKey, _ := btcec.NewPrivateKey() + buyerPubkeyHex := hex.EncodeToString(buyerPrivateKey.PubKey().SerializeCompressed()) + + mockDgTx.EXPECT().GetNodesByIds(mock.Anything, mock.Anything).Return(nil, nil) + + mockDgTx.EXPECT().GetNodesByOwner(mock.Anything, datagateway.GetNodesByOwnerParams{ + SaleBlock: 100, + SaleTxIndex: 1, + OwnerPublicKey: buyerPubkeyHex, + }).Return([]entity.Node{ + { + SaleBlock: 100, + SaleTxIndex: 1, + NodeID: 9, + TierIndex: 2, + OwnerPublicKey: buyerPubkeyHex, + }, + { + SaleBlock: 100, + SaleTxIndex: 1, + NodeID: 10, + TierIndex: 2, + OwnerPublicKey: buyerPubkeyHex, + }, + { + SaleBlock: 100, + SaleTxIndex: 1, + NodeID: 11, + TierIndex: 2, + OwnerPublicKey: buyerPubkeyHex, + }, + }, nil) + + payload := &protobuf.PurchasePayload{ + DeployID: &protobuf.ActionID{ + Block: 100, + TxIndex: 1, + }, + BuyerPublicKey: buyerPubkeyHex, + TimeOutBlock: uint64(testBlockHeight) + 5, + NodeIDs: []uint32{12, 13, 14}, + TotalAmountSat: 600, + } + + payloadBytes, _ := proto.Marshal(payload) + payloadHash := chainhash.DoubleHashB(payloadBytes) + signature := ecdsa.Sign(sellerPrivateKey, payloadHash[:]) + signatureHex := hex.EncodeToString(signature.Serialize()) + + message := &protobuf.NodeSaleEvent{ + Action: protobuf.Action_ACTION_PURCHASE, + Purchase: &protobuf.ActionPurchase{ + Payload: payload, + SellerSignature: signatureHex, + }, + } + + event, block := assembleTestEvent(buyerPrivateKey, "10101010", "10101010", 0, 0, message) + event.InputValue = 600 + + mockDgTx.EXPECT().CreateEvent(mock.Anything, mock.MatchedBy(func(event entity.NodeSaleEvent) bool { + return event.Valid == false && event.Reason == purchase.OVER_LIMIT_PER_TIER + })).Return(nil) + + err := p.ProcessPurchase(ctx, mockDgTx, block, event) + require.NoError(t, err) +} diff --git a/modules/nodesale/repository/postgres/gen/blocks.sql.go b/modules/nodesale/repository/postgres/gen/blocks.sql.go new file mode 100644 index 0000000..970bb84 --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/blocks.sql.go @@ -0,0 +1,62 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.26.0 +// source: blocks.sql + +package gen + +import ( + "context" +) + +const createBlock = `-- name: CreateBlock :exec +INSERT INTO blocks ("block_height", "block_hash", "module") +VALUES ($1, $2, $3) +` + +type CreateBlockParams struct { + BlockHeight int64 + BlockHash string + Module string +} + +func (q *Queries) CreateBlock(ctx context.Context, arg CreateBlockParams) error { + _, err := q.db.Exec(ctx, createBlock, arg.BlockHeight, arg.BlockHash, arg.Module) + return err +} + +const getBlock = `-- name: GetBlock :one +SELECT block_height, block_hash, module FROM blocks +WHERE "block_height" = $1 +` + +func (q *Queries) GetBlock(ctx context.Context, blockHeight int64) (Block, error) { + row := q.db.QueryRow(ctx, getBlock, blockHeight) + var i Block + err := row.Scan(&i.BlockHeight, &i.BlockHash, &i.Module) + return i, err +} + +const getLastProcessedBlock = `-- name: GetLastProcessedBlock :one +SELECT block_height, block_hash, module FROM blocks ORDER BY block_height DESC LIMIT 1 +` + +func (q *Queries) GetLastProcessedBlock(ctx context.Context) (Block, error) { + row := q.db.QueryRow(ctx, getLastProcessedBlock) + var i Block + err := row.Scan(&i.BlockHeight, &i.BlockHash, &i.Module) + return i, err +} + +const removeBlockFrom = `-- name: RemoveBlockFrom :execrows +DELETE FROM blocks +WHERE "block_height" >= $1 +` + +func (q *Queries) RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error) { + result, err := q.db.Exec(ctx, removeBlockFrom, fromBlock) + if err != nil { + return 0, err + } + return result.RowsAffected(), nil +} diff --git a/modules/nodesale/repository/postgres/gen/db.go b/modules/nodesale/repository/postgres/gen/db.go new file mode 100644 index 0000000..3ccd3c9 --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/db.go @@ -0,0 +1,32 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.26.0 + +package gen + +import ( + "context" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" +) + +type DBTX interface { + Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error) + Query(context.Context, string, ...interface{}) (pgx.Rows, error) + QueryRow(context.Context, string, ...interface{}) pgx.Row +} + +func New(db DBTX) *Queries { + return &Queries{db: db} +} + +type Queries struct { + db DBTX +} + +func (q *Queries) WithTx(tx pgx.Tx) *Queries { + return &Queries{ + db: tx, + } +} diff --git a/modules/nodesale/repository/postgres/gen/events.sql.go b/modules/nodesale/repository/postgres/gen/events.sql.go new file mode 100644 index 0000000..1c4086a --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/events.sql.go @@ -0,0 +1,104 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.26.0 +// source: events.sql + +package gen + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const createEvent = `-- name: CreateEvent :exec +INSERT INTO events ("tx_hash", "block_height", "tx_index", "wallet_address", "valid", "action", + "raw_message", "parsed_message", "block_timestamp", "block_hash", "metadata", + "reason") +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) +` + +type CreateEventParams struct { + TxHash string + BlockHeight int64 + TxIndex int32 + WalletAddress string + Valid bool + Action int32 + RawMessage []byte + ParsedMessage []byte + BlockTimestamp pgtype.Timestamp + BlockHash string + Metadata []byte + Reason string +} + +func (q *Queries) CreateEvent(ctx context.Context, arg CreateEventParams) error { + _, err := q.db.Exec(ctx, createEvent, + arg.TxHash, + arg.BlockHeight, + arg.TxIndex, + arg.WalletAddress, + arg.Valid, + arg.Action, + arg.RawMessage, + arg.ParsedMessage, + arg.BlockTimestamp, + arg.BlockHash, + arg.Metadata, + arg.Reason, + ) + return err +} + +const getEventsByWallet = `-- name: GetEventsByWallet :many +SELECT tx_hash, block_height, tx_index, wallet_address, valid, action, raw_message, parsed_message, block_timestamp, block_hash, metadata, reason +FROM events +WHERE wallet_address = $1 +` + +func (q *Queries) GetEventsByWallet(ctx context.Context, walletAddress string) ([]Event, error) { + rows, err := q.db.Query(ctx, getEventsByWallet, walletAddress) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Event + for rows.Next() { + var i Event + if err := rows.Scan( + &i.TxHash, + &i.BlockHeight, + &i.TxIndex, + &i.WalletAddress, + &i.Valid, + &i.Action, + &i.RawMessage, + &i.ParsedMessage, + &i.BlockTimestamp, + &i.BlockHash, + &i.Metadata, + &i.Reason, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const removeEventsFromBlock = `-- name: RemoveEventsFromBlock :execrows +DELETE FROM events +WHERE "block_height" >= $1 +` + +func (q *Queries) RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error) { + result, err := q.db.Exec(ctx, removeEventsFromBlock, fromBlock) + if err != nil { + return 0, err + } + return result.RowsAffected(), nil +} diff --git a/modules/nodesale/repository/postgres/gen/models.go b/modules/nodesale/repository/postgres/gen/models.go new file mode 100644 index 0000000..91b25e0 --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/models.go @@ -0,0 +1,55 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.26.0 + +package gen + +import ( + "github.com/jackc/pgx/v5/pgtype" +) + +type Block struct { + BlockHeight int64 + BlockHash string + Module string +} + +type Event struct { + TxHash string + BlockHeight int64 + TxIndex int32 + WalletAddress string + Valid bool + Action int32 + RawMessage []byte + ParsedMessage []byte + BlockTimestamp pgtype.Timestamp + BlockHash string + Metadata []byte + Reason string +} + +type Node struct { + SaleBlock int64 + SaleTxIndex int32 + NodeID int32 + TierIndex int32 + DelegatedTo string + OwnerPublicKey string + PurchaseTxHash string + DelegateTxHash string +} + +type NodeSale struct { + BlockHeight int64 + TxIndex int32 + Name string + StartsAt pgtype.Timestamp + EndsAt pgtype.Timestamp + Tiers [][]byte + SellerPublicKey string + MaxPerAddress int32 + DeployTxHash string + MaxDiscountPercentage int32 + SellerWallet string +} diff --git a/modules/nodesale/repository/postgres/gen/nodes.sql.go b/modules/nodesale/repository/postgres/gen/nodes.sql.go new file mode 100644 index 0000000..90de1ea --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/nodes.sql.go @@ -0,0 +1,271 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.26.0 +// source: nodes.sql + +package gen + +import ( + "context" +) + +const clearDelegate = `-- name: ClearDelegate :execrows +UPDATE nodes +SET "delegated_to" = '' +WHERE "delegate_tx_hash" = '' +` + +func (q *Queries) ClearDelegate(ctx context.Context) (int64, error) { + result, err := q.db.Exec(ctx, clearDelegate) + if err != nil { + return 0, err + } + return result.RowsAffected(), nil +} + +const createNode = `-- name: CreateNode :exec +INSERT INTO nodes (sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8) +` + +type CreateNodeParams struct { + SaleBlock int64 + SaleTxIndex int32 + NodeID int32 + TierIndex int32 + DelegatedTo string + OwnerPublicKey string + PurchaseTxHash string + DelegateTxHash string +} + +func (q *Queries) CreateNode(ctx context.Context, arg CreateNodeParams) error { + _, err := q.db.Exec(ctx, createNode, + arg.SaleBlock, + arg.SaleTxIndex, + arg.NodeID, + arg.TierIndex, + arg.DelegatedTo, + arg.OwnerPublicKey, + arg.PurchaseTxHash, + arg.DelegateTxHash, + ) + return err +} + +const getNodeCountByTierIndex = `-- name: GetNodeCountByTierIndex :many +SELECT (tiers.tier_index)::int AS tier_index, count(nodes.tier_index) +FROM generate_series($3::int,$4::int) AS tiers(tier_index) +LEFT JOIN + (SELECT sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash + FROM nodes + WHERE sale_block = $1 AND + sale_tx_index= $2) + AS nodes ON tiers.tier_index = nodes.tier_index +GROUP BY tiers.tier_index +ORDER BY tiers.tier_index +` + +type GetNodeCountByTierIndexParams struct { + SaleBlock int64 + SaleTxIndex int32 + FromTier int32 + ToTier int32 +} + +type GetNodeCountByTierIndexRow struct { + TierIndex int32 + Count int64 +} + +func (q *Queries) GetNodeCountByTierIndex(ctx context.Context, arg GetNodeCountByTierIndexParams) ([]GetNodeCountByTierIndexRow, error) { + rows, err := q.db.Query(ctx, getNodeCountByTierIndex, + arg.SaleBlock, + arg.SaleTxIndex, + arg.FromTier, + arg.ToTier, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetNodeCountByTierIndexRow + for rows.Next() { + var i GetNodeCountByTierIndexRow + if err := rows.Scan(&i.TierIndex, &i.Count); err != nil { + return nil, 
err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getNodesByIds = `-- name: GetNodesByIds :many +SELECT sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash +FROM nodes +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + node_id = ANY ($3::int[]) +` + +type GetNodesByIdsParams struct { + SaleBlock int64 + SaleTxIndex int32 + NodeIds []int32 +} + +func (q *Queries) GetNodesByIds(ctx context.Context, arg GetNodesByIdsParams) ([]Node, error) { + rows, err := q.db.Query(ctx, getNodesByIds, arg.SaleBlock, arg.SaleTxIndex, arg.NodeIds) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Node + for rows.Next() { + var i Node + if err := rows.Scan( + &i.SaleBlock, + &i.SaleTxIndex, + &i.NodeID, + &i.TierIndex, + &i.DelegatedTo, + &i.OwnerPublicKey, + &i.PurchaseTxHash, + &i.DelegateTxHash, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getNodesByOwner = `-- name: GetNodesByOwner :many +SELECT sale_block, sale_tx_index, node_id, tier_index, delegated_to, owner_public_key, purchase_tx_hash, delegate_tx_hash +FROM nodes +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + owner_public_key = $3 +ORDER BY tier_index +` + +type GetNodesByOwnerParams struct { + SaleBlock int64 + SaleTxIndex int32 + OwnerPublicKey string +} + +func (q *Queries) GetNodesByOwner(ctx context.Context, arg GetNodesByOwnerParams) ([]Node, error) { + rows, err := q.db.Query(ctx, getNodesByOwner, arg.SaleBlock, arg.SaleTxIndex, arg.OwnerPublicKey) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Node + for rows.Next() { + var i Node + if err := rows.Scan( + &i.SaleBlock, + &i.SaleTxIndex, + &i.NodeID, + &i.TierIndex, + &i.DelegatedTo, + &i.OwnerPublicKey, + &i.PurchaseTxHash, + &i.DelegateTxHash, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const getNodesByPubkey = `-- name: GetNodesByPubkey :many +SELECT nodes.sale_block, nodes.sale_tx_index, nodes.node_id, nodes.tier_index, nodes.delegated_to, nodes.owner_public_key, nodes.purchase_tx_hash, nodes.delegate_tx_hash +FROM nodes JOIN events ON nodes.purchase_tx_hash = events.tx_hash +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + owner_public_key = $3 AND + delegated_to = $4 +` + +type GetNodesByPubkeyParams struct { + SaleBlock int64 + SaleTxIndex int32 + OwnerPublicKey string + DelegatedTo string +} + +func (q *Queries) GetNodesByPubkey(ctx context.Context, arg GetNodesByPubkeyParams) ([]Node, error) { + rows, err := q.db.Query(ctx, getNodesByPubkey, + arg.SaleBlock, + arg.SaleTxIndex, + arg.OwnerPublicKey, + arg.DelegatedTo, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Node + for rows.Next() { + var i Node + if err := rows.Scan( + &i.SaleBlock, + &i.SaleTxIndex, + &i.NodeID, + &i.TierIndex, + &i.DelegatedTo, + &i.OwnerPublicKey, + &i.PurchaseTxHash, + &i.DelegateTxHash, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const setDelegates = `-- name: SetDelegates :execrows +UPDATE nodes +SET delegated_to = $4, delegate_tx_hash = $3 +WHERE sale_block = $1 AND + sale_tx_index = $2 AND + node_id = ANY ($5::int[]) +` + +type 
SetDelegatesParams struct { + SaleBlock int64 + SaleTxIndex int32 + DelegateTxHash string + Delegatee string + NodeIds []int32 +} + +func (q *Queries) SetDelegates(ctx context.Context, arg SetDelegatesParams) (int64, error) { + result, err := q.db.Exec(ctx, setDelegates, + arg.SaleBlock, + arg.SaleTxIndex, + arg.DelegateTxHash, + arg.Delegatee, + arg.NodeIds, + ) + if err != nil { + return 0, err + } + return result.RowsAffected(), nil +} diff --git a/modules/nodesale/repository/postgres/gen/nodesales.sql.go b/modules/nodesale/repository/postgres/gen/nodesales.sql.go new file mode 100644 index 0000000..a6dc565 --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/nodesales.sql.go @@ -0,0 +1,92 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.26.0 +// source: nodesales.sql + +package gen + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const createNodeSale = `-- name: CreateNodeSale :exec +INSERT INTO node_sales ("block_height", "tx_index", "name", "starts_at", "ends_at", "tiers", "seller_public_key", "max_per_address", "deploy_tx_hash", "max_discount_percentage", "seller_wallet") +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) +` + +type CreateNodeSaleParams struct { + BlockHeight int64 + TxIndex int32 + Name string + StartsAt pgtype.Timestamp + EndsAt pgtype.Timestamp + Tiers [][]byte + SellerPublicKey string + MaxPerAddress int32 + DeployTxHash string + MaxDiscountPercentage int32 + SellerWallet string +} + +func (q *Queries) CreateNodeSale(ctx context.Context, arg CreateNodeSaleParams) error { + _, err := q.db.Exec(ctx, createNodeSale, + arg.BlockHeight, + arg.TxIndex, + arg.Name, + arg.StartsAt, + arg.EndsAt, + arg.Tiers, + arg.SellerPublicKey, + arg.MaxPerAddress, + arg.DeployTxHash, + arg.MaxDiscountPercentage, + arg.SellerWallet, + ) + return err +} + +const getNodeSale = `-- name: GetNodeSale :many +SELECT block_height, tx_index, name, starts_at, ends_at, tiers, seller_public_key, max_per_address, deploy_tx_hash, max_discount_percentage, seller_wallet +FROM node_sales +WHERE block_height = $1 AND + tx_index = $2 +` + +type GetNodeSaleParams struct { + BlockHeight int64 + TxIndex int32 +} + +func (q *Queries) GetNodeSale(ctx context.Context, arg GetNodeSaleParams) ([]NodeSale, error) { + rows, err := q.db.Query(ctx, getNodeSale, arg.BlockHeight, arg.TxIndex) + if err != nil { + return nil, err + } + defer rows.Close() + var items []NodeSale + for rows.Next() { + var i NodeSale + if err := rows.Scan( + &i.BlockHeight, + &i.TxIndex, + &i.Name, + &i.StartsAt, + &i.EndsAt, + &i.Tiers, + &i.SellerPublicKey, + &i.MaxPerAddress, + &i.DeployTxHash, + &i.MaxDiscountPercentage, + &i.SellerWallet, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/modules/nodesale/repository/postgres/gen/test.sql.go b/modules/nodesale/repository/postgres/gen/test.sql.go new file mode 100644 index 0000000..248ca00 --- /dev/null +++ b/modules/nodesale/repository/postgres/gen/test.sql.go @@ -0,0 +1,20 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions:
+//   sqlc v1.26.0
+// source: test.sql
+
+package gen
+
+import (
+	"context"
+)
+
+const clearEvents = `-- name: ClearEvents :exec
+DELETE FROM events
+WHERE tx_hash <> ''
+`
+
+func (q *Queries) ClearEvents(ctx context.Context) error {
+	_, err := q.db.Exec(ctx, clearEvents)
+	return err
+}
diff --git a/modules/nodesale/repository/postgres/mapper.go b/modules/nodesale/repository/postgres/mapper.go
new file mode 100644
index 0000000..72c4acd
--- /dev/null
+++ b/modules/nodesale/repository/postgres/mapper.go
@@ -0,0 +1,75 @@
+package postgres
+
+import (
+	"encoding/json"
+
+	"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
+	"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
+	"github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres/gen"
+	"github.com/samber/lo"
+)
+
+func mapNodes(nodes []gen.Node) []entity.Node {
+	return lo.Map(nodes, func(item gen.Node, index int) entity.Node {
+		return entity.Node{
+			SaleBlock:      uint64(item.SaleBlock),
+			SaleTxIndex:    uint32(item.SaleTxIndex),
+			NodeID:         uint32(item.NodeID),
+			TierIndex:      item.TierIndex,
+			DelegatedTo:    item.DelegatedTo,
+			OwnerPublicKey: item.OwnerPublicKey,
+			PurchaseTxHash: item.PurchaseTxHash,
+			DelegateTxHash: item.DelegateTxHash,
+		}
+	})
+}
+
+func mapNodeSales(nodeSales []gen.NodeSale) []entity.NodeSale {
+	return lo.Map(nodeSales, func(item gen.NodeSale, index int) entity.NodeSale {
+		return entity.NodeSale{
+			BlockHeight:           uint64(item.BlockHeight),
+			TxIndex:               uint32(item.TxIndex),
+			Name:                  item.Name,
+			StartsAt:              item.StartsAt.Time,
+			EndsAt:                item.EndsAt.Time,
+			Tiers:                 item.Tiers,
+			SellerPublicKey:       item.SellerPublicKey,
+			MaxPerAddress:         uint32(item.MaxPerAddress),
+			DeployTxHash:          item.DeployTxHash,
+			MaxDiscountPercentage: item.MaxDiscountPercentage,
+			SellerWallet:          item.SellerWallet,
+		}
+	})
+}
+
+func mapNodeCountByTierIndexRows(nodeCount []gen.GetNodeCountByTierIndexRow) []datagateway.GetNodeCountByTierIndexRow {
+	return lo.Map(nodeCount, func(item gen.GetNodeCountByTierIndexRow, index int) datagateway.GetNodeCountByTierIndexRow {
+		return datagateway.GetNodeCountByTierIndexRow{
+			TierIndex: item.TierIndex,
+			Count:     item.Count,
+		}
+	})
+}
+
+func mapNodeSalesEvents(events []gen.Event) []entity.NodeSaleEvent {
+	return lo.Map(events, func(item gen.Event, index int) entity.NodeSaleEvent {
+		var meta entity.MetadataEventPurchase
+		err := json.Unmarshal(item.Metadata, &meta)
+		if err != nil {
+			meta = entity.MetadataEventPurchase{}
+		}
+		return entity.NodeSaleEvent{
+			TxHash:         item.TxHash,
+			BlockHeight:    item.BlockHeight,
+			TxIndex:        item.TxIndex,
+			WalletAddress:  item.WalletAddress,
+			Valid:          item.Valid,
+			Action:         item.Action,
+			RawMessage:     item.RawMessage,
+			ParsedMessage:  item.ParsedMessage,
+			BlockTimestamp: item.BlockTimestamp.Time.UTC(),
+			BlockHash:      item.BlockHash,
+			Metadata:       &meta,
+		}
+	})
+}
diff --git a/modules/nodesale/repository/postgres/repository.go b/modules/nodesale/repository/postgres/repository.go
new file mode 100644
index 0000000..7d456c5
--- /dev/null
+++ b/modules/nodesale/repository/postgres/repository.go
@@ -0,0 +1,236 @@
+package postgres
+
+import (
+	"context"
+	"encoding/json"
+
+	"github.com/cockroachdb/errors"
+	"github.com/gaze-network/indexer-network/internal/postgres"
+	"github.com/gaze-network/indexer-network/modules/nodesale/datagateway"
+	"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
+	"github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres/gen"
+	"github.com/jackc/pgx/v5"
+	
"github.com/jackc/pgx/v5/pgtype" + "github.com/samber/lo" +) + +type Repository struct { + db postgres.DB + queries *gen.Queries + tx pgx.Tx +} + +func NewRepository(db postgres.DB) *Repository { + return &Repository{ + db: db, + queries: gen.New(db), + } +} + +func (repo *Repository) CreateBlock(ctx context.Context, arg entity.Block) error { + err := repo.queries.CreateBlock(ctx, gen.CreateBlockParams{ + BlockHeight: arg.BlockHeight, + BlockHash: arg.BlockHash, + Module: arg.Module, + }) + if err != nil { + return errors.Wrap(err, "Cannot Add block") + } + + return nil +} + +func (repo *Repository) GetBlock(ctx context.Context, blockHeight int64) (*entity.Block, error) { + block, err := repo.queries.GetBlock(ctx, blockHeight) + if err != nil { + return nil, errors.Wrap(err, "Cannot get block") + } + return &entity.Block{ + BlockHeight: block.BlockHeight, + BlockHash: block.BlockHash, + Module: block.Module, + }, nil +} + +func (repo *Repository) GetLastProcessedBlock(ctx context.Context) (*entity.Block, error) { + block, err := repo.queries.GetLastProcessedBlock(ctx) + if err != nil { + return nil, errors.Wrap(err, "Cannot get last processed block") + } + return &entity.Block{ + BlockHeight: block.BlockHeight, + BlockHash: block.BlockHash, + Module: block.Module, + }, nil +} + +func (repo *Repository) RemoveBlockFrom(ctx context.Context, fromBlock int64) (int64, error) { + affected, err := repo.queries.RemoveBlockFrom(ctx, fromBlock) + if err != nil { + return 0, errors.Wrap(err, "Cannot remove blocks") + } + return affected, nil +} + +func (repo *Repository) RemoveEventsFromBlock(ctx context.Context, fromBlock int64) (int64, error) { + affected, err := repo.queries.RemoveEventsFromBlock(ctx, fromBlock) + if err != nil { + return 0, errors.Wrap(err, "Cannot remove events") + } + return affected, nil +} + +func (repo *Repository) ClearDelegate(ctx context.Context) (int64, error) { + affected, err := repo.queries.ClearDelegate(ctx) + if err != nil { + return 0, errors.Wrap(err, "Cannot clear delegate") + } + return affected, nil +} + +func (repo *Repository) GetNodesByIds(ctx context.Context, arg datagateway.GetNodesByIdsParams) ([]entity.Node, error) { + nodes, err := repo.queries.GetNodesByIds(ctx, gen.GetNodesByIdsParams{ + SaleBlock: int64(arg.SaleBlock), + SaleTxIndex: int32(arg.SaleTxIndex), + NodeIds: lo.Map(arg.NodeIds, func(item uint32, index int) int32 { return int32(item) }), + }) + if err != nil { + return nil, errors.Wrap(err, "Cannot get nodes") + } + return mapNodes(nodes), nil +} + +func (repo *Repository) CreateEvent(ctx context.Context, arg entity.NodeSaleEvent) error { + metaDataBytes := []byte("{}") + if arg.Metadata != nil { + metaDataBytes, _ = json.Marshal(arg.Metadata) + } + err := repo.queries.CreateEvent(ctx, gen.CreateEventParams{ + TxHash: arg.TxHash, + BlockHeight: arg.BlockHeight, + TxIndex: arg.TxIndex, + WalletAddress: arg.WalletAddress, + Valid: arg.Valid, + Action: arg.Action, + RawMessage: arg.RawMessage, + ParsedMessage: arg.ParsedMessage, + BlockTimestamp: pgtype.Timestamp{Time: arg.BlockTimestamp.UTC(), Valid: true}, + BlockHash: arg.BlockHash, + Metadata: metaDataBytes, + Reason: arg.Reason, + }) + if err != nil { + return errors.Wrap(err, "Cannot add event") + } + return nil +} + +func (repo *Repository) SetDelegates(ctx context.Context, arg datagateway.SetDelegatesParams) (int64, error) { + affected, err := repo.queries.SetDelegates(ctx, gen.SetDelegatesParams{ + SaleBlock: int64(arg.SaleBlock), + SaleTxIndex: arg.SaleTxIndex, + Delegatee: 
arg.Delegatee, + DelegateTxHash: arg.DelegateTxHash, + NodeIds: lo.Map(arg.NodeIds, func(item uint32, index int) int32 { return int32(item) }), + }) + if err != nil { + return 0, errors.Wrap(err, "Cannot set delegate") + } + return affected, nil +} + +func (repo *Repository) CreateNodeSale(ctx context.Context, arg entity.NodeSale) error { + err := repo.queries.CreateNodeSale(ctx, gen.CreateNodeSaleParams{ + BlockHeight: int64(arg.BlockHeight), + TxIndex: int32(arg.TxIndex), + Name: arg.Name, + StartsAt: pgtype.Timestamp{Time: arg.StartsAt.UTC(), Valid: true}, + EndsAt: pgtype.Timestamp{Time: arg.EndsAt.UTC(), Valid: true}, + Tiers: arg.Tiers, + SellerPublicKey: arg.SellerPublicKey, + MaxPerAddress: int32(arg.MaxPerAddress), + DeployTxHash: arg.DeployTxHash, + MaxDiscountPercentage: arg.MaxDiscountPercentage, + SellerWallet: arg.SellerWallet, + }) + if err != nil { + return errors.Wrap(err, "Cannot add NodeSale") + } + return nil +} + +func (repo *Repository) GetNodeSale(ctx context.Context, arg datagateway.GetNodeSaleParams) ([]entity.NodeSale, error) { + nodeSales, err := repo.queries.GetNodeSale(ctx, gen.GetNodeSaleParams{ + BlockHeight: int64(arg.BlockHeight), + TxIndex: int32(arg.TxIndex), + }) + if err != nil { + return nil, errors.Wrap(err, "Cannot get NodeSale") + } + + return mapNodeSales(nodeSales), nil +} + +func (repo *Repository) GetNodesByOwner(ctx context.Context, arg datagateway.GetNodesByOwnerParams) ([]entity.Node, error) { + nodes, err := repo.queries.GetNodesByOwner(ctx, gen.GetNodesByOwnerParams{ + SaleBlock: int64(arg.SaleBlock), + SaleTxIndex: int32(arg.SaleTxIndex), + OwnerPublicKey: arg.OwnerPublicKey, + }) + if err != nil { + return nil, errors.Wrap(err, "Cannot get nodes by owner") + } + return mapNodes(nodes), nil +} + +func (repo *Repository) CreateNode(ctx context.Context, arg entity.Node) error { + err := repo.queries.CreateNode(ctx, gen.CreateNodeParams{ + SaleBlock: int64(arg.SaleBlock), + SaleTxIndex: int32(arg.SaleTxIndex), + NodeID: int32(arg.NodeID), + TierIndex: arg.TierIndex, + DelegatedTo: arg.DelegatedTo, + OwnerPublicKey: arg.OwnerPublicKey, + PurchaseTxHash: arg.PurchaseTxHash, + DelegateTxHash: arg.DelegateTxHash, + }) + if err != nil { + return errors.Wrap(err, "Cannot add node") + } + return nil +} + +func (repo *Repository) GetNodeCountByTierIndex(ctx context.Context, arg datagateway.GetNodeCountByTierIndexParams) ([]datagateway.GetNodeCountByTierIndexRow, error) { + nodeCount, err := repo.queries.GetNodeCountByTierIndex(ctx, gen.GetNodeCountByTierIndexParams{ + SaleBlock: int64(arg.SaleBlock), + SaleTxIndex: int32(arg.SaleTxIndex), + FromTier: int32(arg.FromTier), + ToTier: int32(arg.ToTier), + }) + if err != nil { + return nil, errors.Wrap(err, "Cannot get node count by tier index") + } + + return mapNodeCountByTierIndexRows(nodeCount), nil +} + +func (repo *Repository) GetNodesByPubkey(ctx context.Context, arg datagateway.GetNodesByPubkeyParams) ([]entity.Node, error) { + nodes, err := repo.queries.GetNodesByPubkey(ctx, gen.GetNodesByPubkeyParams{ + SaleBlock: arg.SaleBlock, + SaleTxIndex: arg.SaleTxIndex, + OwnerPublicKey: arg.OwnerPublicKey, + DelegatedTo: arg.DelegatedTo, + }) + if err != nil { + return nil, errors.Wrap(err, "Cannot get nodes by public key") + } + return mapNodes(nodes), nil +} + +func (repo *Repository) GetEventsByWallet(ctx context.Context, walletAddress string) ([]entity.NodeSaleEvent, error) { + events, err := repo.queries.GetEventsByWallet(ctx, walletAddress) + if err != nil { + return nil, errors.Wrap(err, "cannot 
get events by wallet") + } + return mapNodeSalesEvents(events), nil +} diff --git a/modules/nodesale/repository/postgres/tx.go b/modules/nodesale/repository/postgres/tx.go new file mode 100644 index 0000000..7dcbd22 --- /dev/null +++ b/modules/nodesale/repository/postgres/tx.go @@ -0,0 +1,62 @@ +package postgres + +import ( + "context" + + "github.com/cockroachdb/errors" + "github.com/gaze-network/indexer-network/modules/nodesale/datagateway" + "github.com/gaze-network/indexer-network/pkg/logger" + "github.com/jackc/pgx/v5" +) + +var ErrTxAlreadyExists = errors.New("Transaction already exists. Call Commit() or Rollback() first.") + +func (r *Repository) begin(ctx context.Context) (*Repository, error) { + if r.tx != nil { + return nil, errors.WithStack(ErrTxAlreadyExists) + } + tx, err := r.db.Begin(ctx) + if err != nil { + return nil, errors.Wrap(err, "failed to begin transaction") + } + return &Repository{ + db: r.db, + queries: r.queries.WithTx(tx), + tx: tx, + }, nil +} + +func (r *Repository) BeginNodeSaleTx(ctx context.Context) (datagateway.NodeSaleDataGatewayWithTx, error) { + repo, err := r.begin(ctx) + if err != nil { + return nil, errors.WithStack(err) + } + return repo, nil +} + +func (r *Repository) Commit(ctx context.Context) error { + if r.tx == nil { + return nil + } + err := r.tx.Commit(ctx) + if err != nil { + return errors.Wrap(err, "failed to commit transaction") + } + r.tx = nil + return nil +} + +func (r *Repository) Rollback(ctx context.Context) error { + if r.tx == nil { + return nil + } + err := r.tx.Rollback(ctx) + if err != nil && !errors.Is(err, pgx.ErrTxClosed) { + return errors.Wrap(err, "failed to rollback transaction") + } + if err == nil { + logger.DebugContext(ctx, "rolled back transaction") + } + r.tx = nil + return nil +} diff --git a/modules/nodesale/tapscript.go b/modules/nodesale/tapscript.go new file mode 100644 index 0000000..1a7767c --- /dev/null +++ b/modules/nodesale/tapscript.go @@ -0,0 +1,25 @@ +package nodesale + +import "github.com/btcsuite/btcd/txscript" + +func extractTapScript(witness [][]byte) (tokenizer txscript.ScriptTokenizer, controlBlock *txscript.ControlBlock, isTapScript bool) { + witness = removeAnnexFromWitness(witness) + if len(witness) < 2 { + return txscript.ScriptTokenizer{}, nil, false + } + script := witness[len(witness)-2] + rawControl := witness[len(witness)-1] + parsedControl, err := txscript.ParseControlBlock(rawControl) + if err != nil { + return txscript.ScriptTokenizer{}, nil, false + } + + return txscript.MakeScriptTokenizer(0, script), parsedControl, true +} + +func removeAnnexFromWitness(witness [][]byte) [][]byte { + if len(witness) >= 2 && len(witness[len(witness)-1]) > 0 && witness[len(witness)-1][0] == txscript.TaprootAnnexTag { + return witness[:len(witness)-1] + } + return witness +} diff --git a/sqlc.yaml b/sqlc.yaml index 73e9cff..34cec2e 100644 --- a/sqlc.yaml +++ b/sqlc.yaml @@ -17,3 +17,11 @@ sql: sql_package: "pgx/v5" rename: id: "Id" + - schema: "./modules/nodesale/database/postgresql/migrations" + queries: "./modules/nodesale/database/postgresql/queries" + engine: "postgresql" + gen: + go: + package: "gen" + out: "./modules/nodesale/repository/postgres/gen" + sql_package: "pgx/v5"
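
Usage sketch for the transaction API introduced in modules/nodesale/repository/postgres/tx.go: begin a nodesale transaction, write within it, and commit, rolling back on error. This is illustrative only, under the assumption that datagateway.NodeSaleDataGatewayWithTx exposes the repository methods shown above (e.g. CreateBlock) alongside Commit and Rollback; the function and package placement are hypothetical. Because Commit clears the underlying pgx transaction and Rollback is a no-op when no transaction is held, the deferred Rollback is safe to run unconditionally.

package nodesale

import (
	"context"

	"github.com/cockroachdb/errors"
	"github.com/gaze-network/indexer-network/modules/nodesale/internal/entity"
	"github.com/gaze-network/indexer-network/modules/nodesale/repository/postgres"
)

// indexBlock is a hypothetical caller that records one processed block
// inside a nodesale transaction.
func indexBlock(ctx context.Context, repo *postgres.Repository, block entity.Block) error {
	tx, err := repo.BeginNodeSaleTx(ctx)
	if err != nil {
		return errors.Wrap(err, "failed to begin nodesale transaction")
	}
	// Safe to defer unconditionally: Rollback returns nil once Commit has
	// already cleared the transaction handle.
	defer func() { _ = tx.Rollback(ctx) }()

	// Assumes CreateBlock is part of NodeSaleDataGatewayWithTx; the
	// Repository in this patch implements it.
	if err := tx.CreateBlock(ctx, block); err != nil {
		return errors.Wrap(err, "failed to create block")
	}

	return errors.Wrap(tx.Commit(ctx), "failed to commit nodesale transaction")
}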