withdrawal indexing tests
Some checks failed
Test / Run integration tests (pull_request) Failing after 4m13s
Test / Run compliance tests (pull_request) Successful in 4m14s
Test / Run unit tests (pull_request) Failing after 9m56s

This commit is contained in:
Roy Crihfield 2024-05-08 20:38:56 +08:00
parent 02e55e6abe
commit 671fea7999
10 changed files with 133 additions and 48 deletions

View File

@@ -24,6 +24,7 @@ import (
	"path/filepath"
	"strings"
	"testing"
+	"time"
	"github.com/stretchr/testify/require"
@@ -55,6 +56,8 @@ func setupLegacyCSVIndexer(t *testing.T) {
func setupLegacyCSV(t *testing.T) {
	setupLegacyCSVIndexer(t)
	test.SetupLegacyTestData(t, ind)
+	t.Cleanup(func() { tearDownCSV(t) })
+	time.Sleep(delayForDockerSync)
}
func dumpCSVFileData(t *testing.T) {
@@ -64,7 +67,7 @@ func dumpCSVFileData(t *testing.T) {
	localOutputDir := filepath.Join(workingDir, file.CSVTestConfig.OutputDir)
-	for _, tbl := range file.Tables {
+	for _, tbl := range schema.Tables {
		err := test_helpers.DedupFile(file.TableFilePath(localOutputDir, tbl.Name))
		require.NoError(t, err)
@@ -89,6 +92,7 @@ func dumpCSVFileData(t *testing.T) {
func resetAndDumpWatchedAddressesCSVFileData(t *testing.T) {
	test_helpers.TearDownDB(t, db)
+	time.Sleep(delayForDockerSync)
	outputFilePath := filepath.Join(dbDirectory, file.CSVTestConfig.WatchedAddressesFilePath)
	stmt := fmt.Sprintf(pgCopyStatement, schema.TableWatchedAddresses.Name, outputFilePath)
@@ -111,7 +115,6 @@ func TestLegacyCSVFileIndexer(t *testing.T) {
	t.Run("Publish and index header IPLDs", func(t *testing.T) {
		setupLegacyCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.TestLegacyIndexer(t, db)
	})

View File

@@ -21,6 +21,7 @@ import (
	"math/big"
	"os"
	"testing"
+	"time"
	"github.com/stretchr/testify/require"
@@ -30,6 +31,9 @@ import (
	"github.com/cerc-io/plugeth-statediff/indexer/test"
)
+// docker bind mount is slow to sync files
+var delayForDockerSync = 1 * time.Second
func setupCSVIndexer(t *testing.T) {
	if _, err := os.Stat(file.CSVTestConfig.OutputDir); !errors.Is(err, os.ErrNotExist) {
		err := os.RemoveAll(file.CSVTestConfig.OutputDir)
@@ -53,18 +57,21 @@ func setupCSVIndexer(t *testing.T) {
func setupCSV(t *testing.T) {
	setupCSVIndexer(t)
	test.SetupTestData(t, ind)
+	t.Cleanup(func() { tearDownCSV(t) })
+	time.Sleep(delayForDockerSync)
}
func setupCSVNonCanonical(t *testing.T) {
	setupCSVIndexer(t)
	test.SetupTestDataNonCanonical(t, ind)
+	t.Cleanup(func() { tearDownCSV(t) })
+	time.Sleep(delayForDockerSync)
}
func TestCSVFileIndexer(t *testing.T) {
	t.Run("Publish and index header IPLDs in a single tx", func(t *testing.T) {
		setupCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexHeaderIPLDs(t, db)
	})
@@ -72,7 +79,6 @@ func TestCSVFileIndexer(t *testing.T) {
	t.Run("Publish and index transaction IPLDs in a single tx", func(t *testing.T) {
		setupCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexTransactionIPLDs(t, db)
	})
@@ -80,7 +86,6 @@ func TestCSVFileIndexer(t *testing.T) {
	t.Run("Publish and index log IPLDs for multiple receipt of a specific block", func(t *testing.T) {
		setupCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexLogIPLDs(t, db)
	})
@@ -88,15 +93,20 @@ func TestCSVFileIndexer(t *testing.T) {
	t.Run("Publish and index receipt IPLDs in a single tx", func(t *testing.T) {
		setupCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexReceiptIPLDs(t, db)
	})
+	t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+		setupCSV(t)
+		dumpCSVFileData(t)
+		test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+	})
	t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
		setupCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexStateIPLDs(t, db)
	})
@@ -104,7 +114,6 @@ func TestCSVFileIndexer(t *testing.T) {
	t.Run("Publish and index storage IPLDs in a single tx", func(t *testing.T) {
		setupCSV(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexStorageIPLDs(t, db)
	})
@@ -114,7 +123,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
	t.Run("Publish and index header", func(t *testing.T) {
		setupCSVNonCanonical(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.TestPublishAndIndexHeaderNonCanonical(t, db)
	})
@@ -122,7 +130,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
	t.Run("Publish and index transactions", func(t *testing.T) {
		setupCSVNonCanonical(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexTransactionsNonCanonical(t, db)
	})
@@ -130,7 +137,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
	t.Run("Publish and index receipts", func(t *testing.T) {
		setupCSVNonCanonical(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexReceiptsNonCanonical(t, db)
	})
@@ -138,7 +144,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
	t.Run("Publish and index logs", func(t *testing.T) {
		setupCSVNonCanonical(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexLogsNonCanonical(t, db)
	})
@@ -146,7 +151,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
	t.Run("Publish and index state nodes", func(t *testing.T) {
		setupCSVNonCanonical(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexStateNonCanonical(t, db)
	})
@@ -154,7 +158,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
	t.Run("Publish and index storage nodes", func(t *testing.T) {
		setupCSVNonCanonical(t)
		dumpCSVFileData(t)
-		defer tearDownCSV(t)
		test.DoTestPublishAndIndexStorageNonCanonical(t, db)
	})
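These hunks all apply one pattern: teardown is now registered once with t.Cleanup inside setup, which is why every per-subtest defer tearDownCSV(t) is removed, and a short sleep gives the slow Docker bind mount time to flush the CSV output before it is read back. A consolidated view of the new setupCSV (explanatory comments added here, not part of the diff):

func setupCSV(t *testing.T) {
	setupCSVIndexer(t)
	test.SetupTestData(t, ind)
	// runs when the enclosing (sub)test finishes, replacing the removed
	// per-subtest defer tearDownCSV(t) calls
	t.Cleanup(func() { tearDownCSV(t) })
	// the docker bind mount is slow to sync files; wait before dumping the CSVs
	time.Sleep(delayForDockerSync)
}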

View File

@@ -93,6 +93,14 @@ func TestSQLFileIndexer(t *testing.T) {
		test.DoTestPublishAndIndexReceiptIPLDs(t, db)
	})
+	t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+		setup(t)
+		dumpFileData(t)
+		defer tearDown(t)
+		test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+	})
	t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
		setup(t)
		dumpFileData(t)

View File

@@ -96,6 +96,14 @@ func TestPGXIndexer(t *testing.T) {
		test.DoTestPublishAndIndexReceiptIPLDs(t, db)
	})
+	t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+		setupPGX(t)
+		defer tearDown(t)
+		defer checkTxClosure(t, 1, 0, 1)
+		test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+	})
	t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
		setupPGX(t)
		defer tearDown(t)

View File

@@ -82,6 +82,14 @@ func TestSQLXIndexer(t *testing.T) {
		test.DoTestPublishAndIndexReceiptIPLDs(t, db)
	})
+	t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+		setupSQLX(t)
+		defer tearDown(t)
+		defer checkTxClosure(t, 0, 0, 0)
+		test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+	})
	t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
		setupSQLX(t)
		defer tearDown(t)

View File

@@ -92,7 +92,7 @@ func loadBlockData(t *testing.T) []testCase {
func TestFromBlockAndReceipts(t *testing.T) {
	testCases := loadBlockData(t)
	for _, tc := range testCases {
-		_, _, _, err := FromBlockAndReceipts(tc.block, tc.receipts)
+		_, _, _, _, err := FromBlockAndReceipts(tc.block, tc.receipts)
		if err != nil {
			t.Fatalf("error generating IPLDs from block and receipts, err %v, kind %s, block hash %s", err, tc.kind, tc.block.Hash())
		}

View File

@@ -19,8 +19,8 @@ package mocks
import (
	"crypto/ecdsa"
	"crypto/elliptic"
-	"crypto/rand"
	"math/big"
+	"math/rand"
	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/types"
@@ -39,10 +39,13 @@ import (
// Test variables
var (
+	// RNG for deterministically generated keys
+	rng = rand.New(rand.NewSource(0))
	// block data
	TestChainConfig = params.MainnetChainConfig
	BlockNumber = TestChainConfig.LondonBlock
-	BlockTime = *TestChainConfig.CancunTime // TODO: verify this
+	BlockTime = *TestChainConfig.CancunTime
	// canonical block at London height
	// includes 5 transactions: 3 Legacy + 1 EIP-2930 + 1 EIP-1559
@@ -58,15 +61,19 @@
		Coinbase: common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476777"),
	}
	MockTransactions, MockReceipts, SenderAddr = createTransactionsAndReceipts(TestChainConfig, BlockNumber, BlockTime)
-	MockBlock = types.NewBlock(&MockHeader, MockTransactions, nil, MockReceipts, trie.NewEmpty(nil))
-	MockHeaderRlp, _ = rlp.EncodeToBytes(MockBlock.Header())
+	MockWithdrawals = types.Withdrawals{
+		{Index: 0, Validator: 1, Address: Address, Amount: 1000000000},
+		{Index: 1, Validator: 5, Address: AnotherAddress, Amount: 2000000000},
+	}
+	MockBlock = types.NewBlockWithWithdrawals(&MockHeader, MockTransactions, nil, MockReceipts, MockWithdrawals, trie.NewEmpty(nil))
+	MockHeaderRlp, _ = rlp.EncodeToBytes(MockBlock.Header())
	// non-canonical block at London height
	// includes 2nd and 5th transactions from the canonical block
	MockNonCanonicalHeader = MockHeader
	MockNonCanonicalBlockTransactions = types.Transactions{MockTransactions[1], MockTransactions[4]}
	MockNonCanonicalBlockReceipts = createNonCanonicalBlockReceipts(TestChainConfig, BlockNumber, BlockTime, MockNonCanonicalBlockTransactions)
-	MockNonCanonicalBlock = types.NewBlock(&MockNonCanonicalHeader, MockNonCanonicalBlockTransactions, nil, MockNonCanonicalBlockReceipts, trie.NewEmpty(nil))
+	MockNonCanonicalBlock = types.NewBlockWithWithdrawals(&MockNonCanonicalHeader, MockNonCanonicalBlockTransactions, nil, MockNonCanonicalBlockReceipts, MockWithdrawals[:1], trie.NewEmpty(nil))
	MockNonCanonicalHeaderRlp, _ = rlp.EncodeToBytes(MockNonCanonicalBlock.Header())
	// non-canonical block at London height + 1
@@ -86,7 +93,7 @@
	}
	MockNonCanonicalBlock2Transactions = types.Transactions{MockTransactions[2], MockTransactions[4]}
	MockNonCanonicalBlock2Receipts = createNonCanonicalBlockReceipts(TestChainConfig, Block2Number, BlockTime, MockNonCanonicalBlock2Transactions)
-	MockNonCanonicalBlock2 = types.NewBlock(&MockNonCanonicalHeader2, MockNonCanonicalBlock2Transactions, nil, MockNonCanonicalBlock2Receipts, trie.NewEmpty(nil))
+	MockNonCanonicalBlock2 = types.NewBlockWithWithdrawals(&MockNonCanonicalHeader2, MockNonCanonicalBlock2Transactions, nil, MockNonCanonicalBlock2Receipts, types.Withdrawals{}, trie.NewEmpty(nil))
	MockNonCanonicalHeader2Rlp, _ = rlp.EncodeToBytes(MockNonCanonicalBlock2.Header())
	Address = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476592")
@@ -348,7 +355,10 @@ func NewLegacyData(config *params.ChainConfig) *LegacyData {
	mockTransactions, mockReceipts, senderAddr := createLegacyTransactionsAndReceipts(config, blockNumber)
	mockBlock := types.NewBlock(&mockHeader, mockTransactions, nil, mockReceipts, trie.NewEmpty(nil))
-	mockHeaderRlp, _ := rlp.EncodeToBytes(mockBlock.Header())
+	mockHeaderRlp, err := rlp.EncodeToBytes(mockBlock.Header())
+	if err != nil {
+		panic(err)
+	}
	contractAddress := crypto.CreateAddress(senderAddr, mockTransactions[2].Nonce())
	return &LegacyData{
@@ -388,7 +398,7 @@ func createLegacyTransactionsAndReceipts(config *params.ChainConfig, blockNumber
	blockTime := uint64(0)
	transactionSigner := types.MakeSigner(config, blockNumber, blockTime)
	mockCurve := elliptic.P256()
-	mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader)
+	mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rng)
	if err != nil {
		log.Crit(err.Error())
	}
@@ -460,7 +470,7 @@ func createTransactionsAndReceipts(config *params.ChainConfig, blockNumber *big.
	transactionSigner := types.MakeSigner(config, blockNumber, blockTime)
	mockCurve := elliptic.P256()
-	mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader)
+	mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rng)
	if err != nil {
		log.Crit(err.Error())
	}
@@ -524,7 +534,7 @@ func createTransactionsAndReceipts(config *params.ChainConfig, blockNumber *big.
func createNonCanonicalBlockReceipts(config *params.ChainConfig, blockNumber *big.Int, blockTime uint64, transactions types.Transactions) types.Receipts {
	transactionSigner := types.MakeSigner(config, blockNumber, blockTime)
	mockCurve := elliptic.P256()
-	mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader)
+	mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rng)
	if err != nil {
		log.Crit(err.Error())
	}
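A note on the crypto/rand to math/rand switch above: ecdsa.GenerateKey accepts any io.Reader, and *math/rand.Rand provides a Read method, so a seeded math/rand source can stand in for crypto/rand.Reader; per the new comment, the intent is reproducible key material for the mock fixtures. A standalone sketch of the same idea (illustrative only, not part of this change):

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"fmt"
	"math/rand"
)

func main() {
	// *rand.Rand satisfies io.Reader, so it can be passed to ecdsa.GenerateKey
	// in place of crypto/rand.Reader when tests want a fixed entropy source.
	rng := rand.New(rand.NewSource(0))
	key, err := ecdsa.GenerateKey(elliptic.P256(), rng)
	if err != nil {
		panic(err)
	}
	fmt.Printf("generated key with X = %x\n", key.X)
}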

View File

@@ -33,10 +33,9 @@ import (
	"github.com/cerc-io/plugeth-statediff/indexer/mocks"
	"github.com/cerc-io/plugeth-statediff/indexer/models"
	"github.com/cerc-io/plugeth-statediff/indexer/shared"
-	"github.com/cerc-io/plugeth-statediff/indexer/test_helpers"
)
-// SetupTestData indexes a single mock block along with it's state nodes
+// SetupTestData indexes a single mock block along with its state nodes
func SetupTestData(t *testing.T, ind interfaces.StateDiffIndexer) {
	var tx interfaces.Batch
	tx, err = ind.PushBlock(
@@ -111,11 +110,11 @@ func DoTestPublishAndIndexTransactionIPLDs(t *testing.T, db sql.Database) {
		t.Fatal(err)
	}
	require.Equal(t, 5, len(trxs))
-	expectTrue(t, test_helpers.ListContainsString(trxs, trx1CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(trxs, trx2CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(trxs, trx3CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(trxs, trx4CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(trxs, trx5CID.String()))
+	require.Contains(t, trxs, trx1CID.String())
+	require.Contains(t, trxs, trx2CID.String())
+	require.Contains(t, trxs, trx3CID.String())
+	require.Contains(t, trxs, trx4CID.String())
+	require.Contains(t, trxs, trx5CID.String())
	transactions := mocks.MockBlock.Transactions()
	type txResult struct {
@@ -257,11 +256,11 @@ func DoTestPublishAndIndexReceiptIPLDs(t *testing.T, db sql.Database) {
		t.Fatal(err)
	}
	require.Equal(t, 5, len(rcts))
-	expectTrue(t, test_helpers.ListContainsString(rcts, rct1CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(rcts, rct2CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(rcts, rct3CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(rcts, rct4CID.String()))
-	expectTrue(t, test_helpers.ListContainsString(rcts, rct5CID.String()))
+	require.Contains(t, rcts, rct1CID.String())
+	require.Contains(t, rcts, rct2CID.String())
+	require.Contains(t, rcts, rct3CID.String())
+	require.Contains(t, rcts, rct4CID.String())
+	require.Contains(t, rcts, rct5CID.String())
	for idx, c := range rcts {
		result := make([]models.IPLDModel, 0)
@@ -335,6 +334,41 @@ func DoTestPublishAndIndexReceiptIPLDs(t *testing.T, db sql.Database) {
	}
}
+func DoTestPublishAndIndexWithdrawalIPLDs(t *testing.T, db sql.Database) {
+	// check that withdrawals were properly indexed and published
+	wds := make([]string, 0)
+	pgStr := `SELECT withdrawal_cids.cid FROM eth.withdrawal_cids
+	INNER JOIN eth.header_cids ON (withdrawal_cids.header_id = header_cids.block_hash)
+	WHERE header_cids.block_number = $1
+	ORDER BY withdrawal_cids.index`
+	err = db.Select(context.Background(), &wds, pgStr, mocks.BlockNumber.Uint64())
+	if err != nil {
+		t.Fatal(err)
+	}
+	require.Equal(t, 2, len(wds))
+	require.Contains(t, wds, wd1CID.String())
+	require.Contains(t, wds, wd2CID.String())
+	for _, c := range wds {
+		dc, err := cid.Decode(c)
+		if err != nil {
+			t.Fatal(err)
+		}
+		var data []byte
+		err = db.Get(context.Background(), &data, ipfsPgGet, dc.String(), mocks.BlockNumber.Uint64())
+		if err != nil {
+			t.Fatal(err)
+		}
+		switch c {
+		case wd1CID.String():
+			require.Equal(t, wd1, data)
+		case wd2CID.String():
+			require.Equal(t, wd2, data)
+		}
+	}
+}
func DoTestPublishAndIndexStateIPLDs(t *testing.T, db sql.Database) {
	// check that state nodes were properly indexed and published
	stateNodes := make([]models.StateNodeModel, 0)
@@ -594,7 +628,7 @@ func SetupTestDataNonCanonical(t *testing.T, ind interfaces.StateDiffIndexer) {
func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
	// check indexed headers
	pgStr := `SELECT CAST(block_number as TEXT), block_hash, cid, cast(td AS TEXT), cast(reward AS TEXT),
-	tx_root, receipt_root, uncles_hash, coinbase
+	tx_root, receipt_root, uncles_hash, coinbase, withdrawals_root
	FROM eth.header_cids
	ORDER BY block_number`
	headerRes := make([]models.HeaderModel, 0)
@@ -616,6 +650,7 @@ func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
			RctRoot: mockBlock.ReceiptHash().String(),
			UnclesHash: mockBlock.UncleHash().String(),
			Coinbase: mocks.MockHeader.Coinbase.String(),
+			WithdrawalsRoot: shared.MaybeStringHash(mockBlock.Header().WithdrawalsHash),
		},
		{
			BlockNumber: mockNonCanonicalBlock.Number().String(),
@@ -626,6 +661,7 @@ func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
			RctRoot: mockNonCanonicalBlock.ReceiptHash().String(),
			UnclesHash: mockNonCanonicalBlock.UncleHash().String(),
			Coinbase: mocks.MockNonCanonicalHeader.Coinbase.String(),
+			WithdrawalsRoot: shared.MaybeStringHash(mockNonCanonicalBlock.Header().WithdrawalsHash),
		},
		{
			BlockNumber: mockNonCanonicalBlock2.Number().String(),
@@ -636,6 +672,7 @@ func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
			RctRoot: mockNonCanonicalBlock2.ReceiptHash().String(),
			UnclesHash: mockNonCanonicalBlock2.UncleHash().String(),
			Coinbase: mocks.MockNonCanonicalHeader2.Coinbase.String(),
+			WithdrawalsRoot: shared.MaybeStringHash(mockNonCanonicalBlock2.Header().WithdrawalsHash),
		},
	}
	expectedRes[0].Reward = shared.CalcEthBlockReward(mockBlock.Header(), mockBlock.Uncles(), mockBlock.Transactions(), mocks.MockReceipts).String()
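The expectTrue/ListContainsString assertions above become testify's require.Contains, which accepts slices directly and reports both the collection and the missing element on failure; the custom helper removed further below is therefore no longer needed. A minimal sketch of the replacement idiom (illustrative names only):

package example

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestContainsIdiom(t *testing.T) {
	// require.Contains works on strings, slices, arrays, and maps
	cids := []string{"cid-1", "cid-2", "cid-3"}
	require.Contains(t, cids, "cid-2")
}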

View File

@@ -38,12 +38,14 @@
	watchedAddressesPgGet = `SELECT *
	FROM eth_meta.watched_addresses`
	tx1, tx2, tx3, tx4, tx5, rct1, rct2, rct3, rct4, rct5 []byte
+	wd1, wd2 []byte
	nonCanonicalBlockRct1, nonCanonicalBlockRct2 []byte
	nonCanonicalBlock2Rct1, nonCanonicalBlock2Rct2 []byte
	mockBlock, mockNonCanonicalBlock, mockNonCanonicalBlock2 *types.Block
	headerCID, mockNonCanonicalHeaderCID, mockNonCanonicalHeader2CID cid.Cid
	trx1CID, trx2CID, trx3CID, trx4CID, trx5CID cid.Cid
	rct1CID, rct2CID, rct3CID, rct4CID, rct5CID cid.Cid
+	wd1CID, wd2CID cid.Cid
	nonCanonicalBlockRct1CID, nonCanonicalBlockRct2CID cid.Cid
	nonCanonicalBlock2Rct1CID, nonCanonicalBlock2Rct2CID cid.Cid
	state1CID, state2CID, storageCID cid.Cid
@@ -114,6 +116,18 @@ func init() {
	copy(rct5, buf.Bytes())
	buf.Reset()
+	// encode mock withdrawals
+	// wds
+	mocks.MockWithdrawals.EncodeIndex(0, buf)
+	wd1 = make([]byte, buf.Len())
+	copy(wd1, buf.Bytes())
+	buf.Reset()
+	mocks.MockWithdrawals.EncodeIndex(1, buf)
+	wd2 = make([]byte, buf.Len())
+	copy(wd2, buf.Bytes())
+	buf.Reset()
	// encode mock receipts for non-canonical blocks
	nonCanonicalBlockRcts.EncodeIndex(0, buf)
	nonCanonicalBlockRct1 = make([]byte, buf.Len())
@@ -152,6 +166,9 @@ func init() {
	rct4CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, rct4, multihash.KECCAK_256)
	rct5CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, rct5, multihash.KECCAK_256)
+	wd1CID, _ = ipld.RawdataToCid(ipld.MEthWithdrawal, wd1, multihash.KECCAK_256)
+	wd2CID, _ = ipld.RawdataToCid(ipld.MEthWithdrawal, wd2, multihash.KECCAK_256)
	// create raw receipts for non-canonical blocks
	nonCanonicalBlockRct1CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, nonCanonicalBlockRct1, multihash.KECCAK_256)
	nonCanonicalBlockRct2CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, nonCanonicalBlockRct2, multihash.KECCAK_256)

View File

@@ -25,16 +25,6 @@ import (
	"github.com/cerc-io/plugeth-statediff/indexer/database/sql"
)
-// ListContainsString used to check if a list of strings contains a particular string
-func ListContainsString(sss []string, s string) bool {
-	for _, str := range sss {
-		if s == str {
-			return true
-		}
-	}
-	return false
-}
// DedupFile removes duplicates from the given file
func DedupFile(filePath string) error {
	f, err := os.OpenFile(filePath, os.O_CREATE|os.O_RDONLY, os.ModePerm)
@@ -86,6 +76,7 @@ func TearDownDB(t *testing.T, db sql.Database) {
		`TRUNCATE eth.state_cids`,
		`TRUNCATE eth.storage_cids`,
		`TRUNCATE eth.log_cids`,
+		`TRUNCATE eth.withdrawal_cids`,
		`TRUNCATE eth_meta.watched_addresses`,
	}
	for _, stm := range statements {