withdrawal indexing tests

parent 3cd43e8d14
commit 2115770095
@@ -24,6 +24,7 @@ import (
     "path/filepath"
     "strings"
     "testing"
+    "time"
 
     "github.com/stretchr/testify/require"
 
@@ -55,6 +56,8 @@ func setupLegacyCSVIndexer(t *testing.T) {
 func setupLegacyCSV(t *testing.T) {
     setupLegacyCSVIndexer(t)
     test.SetupLegacyTestData(t, ind)
+    t.Cleanup(func() { tearDownCSV(t) })
+    time.Sleep(delayForDockerSync)
 }
 
 func dumpCSVFileData(t *testing.T) {
@@ -64,7 +67,7 @@ func dumpCSVFileData(t *testing.T) {
 
     localOutputDir := filepath.Join(workingDir, file.CSVTestConfig.OutputDir)
 
-    for _, tbl := range file.Tables {
+    for _, tbl := range schema.Tables {
         err := test_helpers.DedupFile(file.TableFilePath(localOutputDir, tbl.Name))
         require.NoError(t, err)
 
@@ -89,6 +92,7 @@ func dumpCSVFileData(t *testing.T) {
 func resetAndDumpWatchedAddressesCSVFileData(t *testing.T) {
     test_helpers.TearDownDB(t, db)
 
+    time.Sleep(delayForDockerSync)
     outputFilePath := filepath.Join(dbDirectory, file.CSVTestConfig.WatchedAddressesFilePath)
     stmt := fmt.Sprintf(pgCopyStatement, schema.TableWatchedAddresses.Name, outputFilePath)
 
@@ -111,7 +115,6 @@ func TestLegacyCSVFileIndexer(t *testing.T) {
     t.Run("Publish and index header IPLDs", func(t *testing.T) {
         setupLegacyCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.TestLegacyIndexer(t, db)
     })

@@ -21,6 +21,7 @@ import (
     "math/big"
     "os"
     "testing"
+    "time"
 
     "github.com/stretchr/testify/require"
 
@@ -30,6 +31,9 @@ import (
     "github.com/cerc-io/plugeth-statediff/indexer/test"
 )
 
+// docker bind mount is slow to sync files
+var delayForDockerSync = 1 * time.Second
+
 func setupCSVIndexer(t *testing.T) {
     if _, err := os.Stat(file.CSVTestConfig.OutputDir); !errors.Is(err, os.ErrNotExist) {
         err := os.RemoveAll(file.CSVTestConfig.OutputDir)
@@ -53,18 +57,21 @@ func setupCSVIndexer(t *testing.T) {
 func setupCSV(t *testing.T) {
     setupCSVIndexer(t)
     test.SetupTestData(t, ind)
+    t.Cleanup(func() { tearDownCSV(t) })
+    time.Sleep(delayForDockerSync)
 }
 
 func setupCSVNonCanonical(t *testing.T) {
     setupCSVIndexer(t)
     test.SetupTestDataNonCanonical(t, ind)
+    t.Cleanup(func() { tearDownCSV(t) })
+    time.Sleep(delayForDockerSync)
 }
 
 func TestCSVFileIndexer(t *testing.T) {
     t.Run("Publish and index header IPLDs in a single tx", func(t *testing.T) {
         setupCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexHeaderIPLDs(t, db)
     })
@@ -72,7 +79,6 @@ func TestCSVFileIndexer(t *testing.T) {
     t.Run("Publish and index transaction IPLDs in a single tx", func(t *testing.T) {
         setupCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexTransactionIPLDs(t, db)
     })
@@ -80,7 +86,6 @@ func TestCSVFileIndexer(t *testing.T) {
     t.Run("Publish and index log IPLDs for multiple receipt of a specific block", func(t *testing.T) {
         setupCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexLogIPLDs(t, db)
     })
@@ -88,15 +93,20 @@ func TestCSVFileIndexer(t *testing.T) {
     t.Run("Publish and index receipt IPLDs in a single tx", func(t *testing.T) {
         setupCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexReceiptIPLDs(t, db)
     })
 
+    t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+        setupCSV(t)
+        dumpCSVFileData(t)
+
+        test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+    })
+
     t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
         setupCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexStateIPLDs(t, db)
     })
@@ -104,7 +114,6 @@ func TestCSVFileIndexer(t *testing.T) {
     t.Run("Publish and index storage IPLDs in a single tx", func(t *testing.T) {
         setupCSV(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexStorageIPLDs(t, db)
     })
@@ -114,7 +123,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
     t.Run("Publish and index header", func(t *testing.T) {
         setupCSVNonCanonical(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.TestPublishAndIndexHeaderNonCanonical(t, db)
     })
@@ -122,7 +130,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
     t.Run("Publish and index transactions", func(t *testing.T) {
         setupCSVNonCanonical(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexTransactionsNonCanonical(t, db)
     })
@@ -130,7 +137,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
     t.Run("Publish and index receipts", func(t *testing.T) {
         setupCSVNonCanonical(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexReceiptsNonCanonical(t, db)
     })
@@ -138,7 +144,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
     t.Run("Publish and index logs", func(t *testing.T) {
         setupCSVNonCanonical(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexLogsNonCanonical(t, db)
     })
@@ -146,7 +151,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
     t.Run("Publish and index state nodes", func(t *testing.T) {
         setupCSVNonCanonical(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexStateNonCanonical(t, db)
     })
@@ -154,7 +158,6 @@ func TestCSVFileIndexerNonCanonical(t *testing.T) {
     t.Run("Publish and index storage nodes", func(t *testing.T) {
         setupCSVNonCanonical(t)
         dumpCSVFileData(t)
-        defer tearDownCSV(t)
 
         test.DoTestPublishAndIndexStorageNonCanonical(t, db)
     })

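The CSV suites above now register teardown once inside the setup helpers with t.Cleanup and pause briefly so the docker bind mount can flush the written files, which is why the per-subtest defer tearDownCSV(t) lines are dropped. A minimal sketch of that pattern (helper and package names here are illustrative, not the repo's):

package csvtest

import (
    "testing"
    "time"
)

// assumed to mirror delayForDockerSync in the diff above
const syncDelay = 1 * time.Second

// setupWithCleanup is a hypothetical helper showing the pattern: teardown is
// registered once via t.Cleanup, so individual subtests no longer need a
// `defer tearDown(t)`.
func setupWithCleanup(t *testing.T, setup, teardown func(*testing.T)) {
    setup(t)
    t.Cleanup(func() { teardown(t) })
    // give the docker bind mount time to sync the written CSV files
    time.Sleep(syncDelay)
}

t.Cleanup runs when the (sub)test that registered it completes, so every subtest that calls the setup helper gets its own teardown without repeating a defer.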
@@ -93,6 +93,14 @@ func TestSQLFileIndexer(t *testing.T) {
         test.DoTestPublishAndIndexReceiptIPLDs(t, db)
     })
 
+    t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+        setup(t)
+        dumpFileData(t)
+        defer tearDown(t)
+
+        test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+    })
+
     t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
         setup(t)
         dumpFileData(t)

@@ -96,6 +96,14 @@ func TestPGXIndexer(t *testing.T) {
         test.DoTestPublishAndIndexReceiptIPLDs(t, db)
     })
 
+    t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+        setupPGX(t)
+        defer tearDown(t)
+        defer checkTxClosure(t, 1, 0, 1)
+
+        test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+    })
+
     t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
         setupPGX(t)
         defer tearDown(t)

@@ -82,6 +82,14 @@ func TestSQLXIndexer(t *testing.T) {
         test.DoTestPublishAndIndexReceiptIPLDs(t, db)
     })
 
+    t.Run("Publish and index withdrawal IPLDs in a single tx", func(t *testing.T) {
+        setupSQLX(t)
+        defer tearDown(t)
+        defer checkTxClosure(t, 0, 0, 0)
+
+        test.DoTestPublishAndIndexWithdrawalIPLDs(t, db)
+    })
+
     t.Run("Publish and index state IPLDs in a single tx", func(t *testing.T) {
         setupSQLX(t)
         defer tearDown(t)

@@ -92,7 +92,7 @@ func loadBlockData(t *testing.T) []testCase
 func TestFromBlockAndReceipts(t *testing.T) {
     testCases := loadBlockData(t)
     for _, tc := range testCases {
-        _, _, _, err := FromBlockAndReceipts(tc.block, tc.receipts)
+        _, _, _, _, err := FromBlockAndReceipts(tc.block, tc.receipts)
         if err != nil {
             t.Fatalf("error generating IPLDs from block and receipts, err %v, kind %s, block hash %s", err, tc.kind, tc.block.Hash())
         }

@@ -19,8 +19,8 @@ package mocks
 import (
     "crypto/ecdsa"
     "crypto/elliptic"
-    "crypto/rand"
     "math/big"
+    "math/rand"
 
     "github.com/ethereum/go-ethereum/common"
     "github.com/ethereum/go-ethereum/core/types"
@@ -39,6 +39,9 @@ import (
 
 // Test variables
 var (
+    // RNG for deterministically generated keys
+    rng = rand.New(rand.NewSource(0))
+
     // block data
     TestChainConfig = params.MainnetChainConfig
     BlockNumber = TestChainConfig.LondonBlock
@@ -58,7 +61,11 @@ var (
         Coinbase: common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476777"),
     }
     MockTransactions, MockReceipts, SenderAddr = createTransactionsAndReceipts(TestChainConfig, BlockNumber, BlockTime)
-    MockBlock = types.NewBlock(&MockHeader, MockTransactions, nil, MockReceipts, trie.NewEmpty(nil))
+    MockWithdrawals = types.Withdrawals{
+        {Index: 0, Validator: 1, Address: Address, Amount: 1000000000},
+        {Index: 1, Validator: 5, Address: AnotherAddress, Amount: 2000000000},
+    }
+    MockBlock = types.NewBlockWithWithdrawals(&MockHeader, MockTransactions, nil, MockReceipts, MockWithdrawals, trie.NewEmpty(nil))
     MockHeaderRlp, _ = rlp.EncodeToBytes(MockBlock.Header())
 
     // non-canonical block at London height
@@ -66,7 +73,7 @@ var (
     MockNonCanonicalHeader = MockHeader
     MockNonCanonicalBlockTransactions = types.Transactions{MockTransactions[1], MockTransactions[4]}
     MockNonCanonicalBlockReceipts = createNonCanonicalBlockReceipts(TestChainConfig, BlockNumber, BlockTime, MockNonCanonicalBlockTransactions)
-    MockNonCanonicalBlock = types.NewBlock(&MockNonCanonicalHeader, MockNonCanonicalBlockTransactions, nil, MockNonCanonicalBlockReceipts, trie.NewEmpty(nil))
+    MockNonCanonicalBlock = types.NewBlockWithWithdrawals(&MockNonCanonicalHeader, MockNonCanonicalBlockTransactions, nil, MockNonCanonicalBlockReceipts, MockWithdrawals[:1], trie.NewEmpty(nil))
     MockNonCanonicalHeaderRlp, _ = rlp.EncodeToBytes(MockNonCanonicalBlock.Header())
 
     // non-canonical block at London height + 1
@@ -86,7 +93,7 @@ var (
     }
     MockNonCanonicalBlock2Transactions = types.Transactions{MockTransactions[2], MockTransactions[4]}
     MockNonCanonicalBlock2Receipts = createNonCanonicalBlockReceipts(TestChainConfig, Block2Number, BlockTime, MockNonCanonicalBlock2Transactions)
-    MockNonCanonicalBlock2 = types.NewBlock(&MockNonCanonicalHeader2, MockNonCanonicalBlock2Transactions, nil, MockNonCanonicalBlock2Receipts, trie.NewEmpty(nil))
+    MockNonCanonicalBlock2 = types.NewBlockWithWithdrawals(&MockNonCanonicalHeader2, MockNonCanonicalBlock2Transactions, nil, MockNonCanonicalBlock2Receipts, types.Withdrawals{}, trie.NewEmpty(nil))
     MockNonCanonicalHeader2Rlp, _ = rlp.EncodeToBytes(MockNonCanonicalBlock2.Header())
 
     Address = common.HexToAddress("0xaE9BEa628c4Ce503DcFD7E305CaB4e29E7476592")
@@ -348,7 +355,10 @@ func NewLegacyData(config *params.ChainConfig) *LegacyData {
 
     mockTransactions, mockReceipts, senderAddr := createLegacyTransactionsAndReceipts(config, blockNumber)
     mockBlock := types.NewBlock(&mockHeader, mockTransactions, nil, mockReceipts, trie.NewEmpty(nil))
-    mockHeaderRlp, _ := rlp.EncodeToBytes(mockBlock.Header())
+    mockHeaderRlp, err := rlp.EncodeToBytes(mockBlock.Header())
+    if err != nil {
+        panic(err)
+    }
     contractAddress := crypto.CreateAddress(senderAddr, mockTransactions[2].Nonce())
 
     return &LegacyData{
@@ -388,7 +398,7 @@ func createLegacyTransactionsAndReceipts(config *params.ChainConfig, blockNumber
     blockTime := uint64(0)
     transactionSigner := types.MakeSigner(config, blockNumber, blockTime)
     mockCurve := elliptic.P256()
-    mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader)
+    mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rng)
     if err != nil {
         log.Crit(err.Error())
     }
@@ -460,7 +470,7 @@ func createTransactionsAndReceipts(config *params.ChainConfig, blockNumber *big.
 
     transactionSigner := types.MakeSigner(config, blockNumber, blockTime)
     mockCurve := elliptic.P256()
-    mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader)
+    mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rng)
     if err != nil {
         log.Crit(err.Error())
     }
@@ -524,7 +534,7 @@ func createTransactionsAndReceipts(config *params.ChainConfig, blockNumber *big.
 func createNonCanonicalBlockReceipts(config *params.ChainConfig, blockNumber *big.Int, blockTime uint64, transactions types.Transactions) types.Receipts {
     transactionSigner := types.MakeSigner(config, blockNumber, blockTime)
     mockCurve := elliptic.P256()
-    mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rand.Reader)
+    mockPrvKey, err := ecdsa.GenerateKey(mockCurve, rng)
     if err != nil {
         log.Crit(err.Error())
     }

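The mocks swap crypto/rand for a seeded math/rand source when generating the test private keys, so the keys (and the sender addresses and hashes derived from them) are intended to be reproducible between runs. A *rand.Rand satisfies io.Reader, which is why it can be passed straight to ecdsa.GenerateKey; a self-contained sketch of the idea (not the repo's code):

package main

import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "fmt"
    "math/rand"
)

func main() {
    // a fixed seed makes the key material repeatable; fine for test fixtures,
    // never acceptable for real keys
    rng := rand.New(rand.NewSource(0))
    key, err := ecdsa.GenerateKey(elliptic.P256(), rng)
    if err != nil {
        panic(err)
    }
    fmt.Printf("generated test key on curve %s\n", key.Params().Name)
}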
@@ -33,10 +33,9 @@ import (
     "github.com/cerc-io/plugeth-statediff/indexer/mocks"
     "github.com/cerc-io/plugeth-statediff/indexer/models"
     "github.com/cerc-io/plugeth-statediff/indexer/shared"
-    "github.com/cerc-io/plugeth-statediff/indexer/test_helpers"
 )
 
-// SetupTestData indexes a single mock block along with it's state nodes
+// SetupTestData indexes a single mock block along with its state nodes
 func SetupTestData(t *testing.T, ind interfaces.StateDiffIndexer) {
     var tx interfaces.Batch
     tx, err = ind.PushBlock(
@@ -111,11 +110,11 @@ func DoTestPublishAndIndexTransactionIPLDs(t *testing.T, db sql.Database) {
         t.Fatal(err)
     }
     require.Equal(t, 5, len(trxs))
-    expectTrue(t, test_helpers.ListContainsString(trxs, trx1CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(trxs, trx2CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(trxs, trx3CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(trxs, trx4CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(trxs, trx5CID.String()))
+    require.Contains(t, trxs, trx1CID.String())
+    require.Contains(t, trxs, trx2CID.String())
+    require.Contains(t, trxs, trx3CID.String())
+    require.Contains(t, trxs, trx4CID.String())
+    require.Contains(t, trxs, trx5CID.String())
 
     transactions := mocks.MockBlock.Transactions()
     type txResult struct {
@@ -257,11 +256,11 @@ func DoTestPublishAndIndexReceiptIPLDs(t *testing.T, db sql.Database) {
         t.Fatal(err)
     }
     require.Equal(t, 5, len(rcts))
-    expectTrue(t, test_helpers.ListContainsString(rcts, rct1CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(rcts, rct2CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(rcts, rct3CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(rcts, rct4CID.String()))
-    expectTrue(t, test_helpers.ListContainsString(rcts, rct5CID.String()))
+    require.Contains(t, rcts, rct1CID.String())
+    require.Contains(t, rcts, rct2CID.String())
+    require.Contains(t, rcts, rct3CID.String())
+    require.Contains(t, rcts, rct4CID.String())
+    require.Contains(t, rcts, rct5CID.String())
 
     for idx, c := range rcts {
         result := make([]models.IPLDModel, 0)
@@ -335,6 +334,41 @@ func DoTestPublishAndIndexReceiptIPLDs(t *testing.T, db sql.Database) {
     }
 }
 
+func DoTestPublishAndIndexWithdrawalIPLDs(t *testing.T, db sql.Database) {
+    // check that withdrawals were properly indexed and published
+    wds := make([]string, 0)
+    pgStr := `SELECT withdrawal_cids.cid FROM eth.withdrawal_cids
+    INNER JOIN eth.header_cids ON (withdrawal_cids.header_id = header_cids.block_hash)
+    WHERE header_cids.block_number = $1
+    ORDER BY withdrawal_cids.index`
+    err = db.Select(context.Background(), &wds, pgStr, mocks.BlockNumber.Uint64())
+    if err != nil {
+        t.Fatal(err)
+    }
+    require.Equal(t, 2, len(wds))
+    require.Contains(t, wds, wd1CID.String())
+    require.Contains(t, wds, wd2CID.String())
+
+    for _, c := range wds {
+        dc, err := cid.Decode(c)
+        if err != nil {
+            t.Fatal(err)
+        }
+        var data []byte
+        err = db.Get(context.Background(), &data, ipfsPgGet, dc.String(), mocks.BlockNumber.Uint64())
+        if err != nil {
+            t.Fatal(err)
+        }
+
+        switch c {
+        case wd1CID.String():
+            require.Equal(t, wd1, data)
+        case wd2CID.String():
+            require.Equal(t, wd2, data)
+        }
+    }
+}
+
 func DoTestPublishAndIndexStateIPLDs(t *testing.T, db sql.Database) {
     // check that state nodes were properly indexed and published
     stateNodes := make([]models.StateNodeModel, 0)
@@ -594,7 +628,7 @@ func SetupTestDataNonCanonical(t *testing.T, ind interfaces.StateDiffIndexer) {
 func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
     // check indexed headers
     pgStr := `SELECT CAST(block_number as TEXT), block_hash, cid, cast(td AS TEXT), cast(reward AS TEXT),
-    tx_root, receipt_root, uncles_hash, coinbase
+    tx_root, receipt_root, uncles_hash, coinbase, withdrawals_root
     FROM eth.header_cids
     ORDER BY block_number`
     headerRes := make([]models.HeaderModel, 0)
@@ -616,6 +650,7 @@ func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
             RctRoot: mockBlock.ReceiptHash().String(),
             UnclesHash: mockBlock.UncleHash().String(),
             Coinbase: mocks.MockHeader.Coinbase.String(),
+            WithdrawalsRoot: shared.MaybeStringHash(mockBlock.Header().WithdrawalsHash),
         },
         {
             BlockNumber: mockNonCanonicalBlock.Number().String(),
@@ -626,6 +661,7 @@ func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
             RctRoot: mockNonCanonicalBlock.ReceiptHash().String(),
             UnclesHash: mockNonCanonicalBlock.UncleHash().String(),
             Coinbase: mocks.MockNonCanonicalHeader.Coinbase.String(),
+            WithdrawalsRoot: shared.MaybeStringHash(mockNonCanonicalBlock.Header().WithdrawalsHash),
         },
         {
             BlockNumber: mockNonCanonicalBlock2.Number().String(),
@@ -636,6 +672,7 @@ func TestPublishAndIndexHeaderNonCanonical(t *testing.T, db sql.Database) {
             RctRoot: mockNonCanonicalBlock2.ReceiptHash().String(),
             UnclesHash: mockNonCanonicalBlock2.UncleHash().String(),
             Coinbase: mocks.MockNonCanonicalHeader2.Coinbase.String(),
+            WithdrawalsRoot: shared.MaybeStringHash(mockNonCanonicalBlock2.Header().WithdrawalsHash),
         },
     }
     expectedRes[0].Reward = shared.CalcEthBlockReward(mockBlock.Header(), mockBlock.Uncles(), mockBlock.Transactions(), mocks.MockReceipts).String()

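The WithdrawalsRoot expectations added above lean on types.NewBlockWithWithdrawals deriving the header's withdrawals hash from the withdrawal list. A rough sketch of that relationship, reusing the trie.NewEmpty(nil) hasher the mocks pass (illustrative only; assumes go-ethereum's constructor sets WithdrawalsHash via DeriveSha, as its documented behavior suggests):

package main

import (
    "fmt"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/core/types"
    "github.com/ethereum/go-ethereum/trie"
)

func main() {
    wds := types.Withdrawals{
        {Index: 0, Validator: 1, Address: common.Address{0x01}, Amount: 1000000000},
        {Index: 1, Validator: 5, Address: common.Address{0x02}, Amount: 2000000000},
    }
    header := &types.Header{}
    block := types.NewBlockWithWithdrawals(header, nil, nil, nil, wds, trie.NewEmpty(nil))

    // the constructor is expected to set the header's WithdrawalsHash to the
    // trie root of the withdrawal list, which is what the indexer stores as
    // withdrawals_root
    root := types.DeriveSha(wds, trie.NewEmpty(nil))
    fmt.Println(block.Header().WithdrawalsHash != nil && *block.Header().WithdrawalsHash == root)
}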
@@ -38,12 +38,14 @@ var (
     watchedAddressesPgGet = `SELECT *
     FROM eth_meta.watched_addresses`
     tx1, tx2, tx3, tx4, tx5, rct1, rct2, rct3, rct4, rct5 []byte
+    wd1, wd2 []byte
     nonCanonicalBlockRct1, nonCanonicalBlockRct2 []byte
     nonCanonicalBlock2Rct1, nonCanonicalBlock2Rct2 []byte
     mockBlock, mockNonCanonicalBlock, mockNonCanonicalBlock2 *types.Block
     headerCID, mockNonCanonicalHeaderCID, mockNonCanonicalHeader2CID cid.Cid
     trx1CID, trx2CID, trx3CID, trx4CID, trx5CID cid.Cid
     rct1CID, rct2CID, rct3CID, rct4CID, rct5CID cid.Cid
+    wd1CID, wd2CID cid.Cid
     nonCanonicalBlockRct1CID, nonCanonicalBlockRct2CID cid.Cid
     nonCanonicalBlock2Rct1CID, nonCanonicalBlock2Rct2CID cid.Cid
     state1CID, state2CID, storageCID cid.Cid
@@ -114,6 +116,18 @@ func init() {
     copy(rct5, buf.Bytes())
     buf.Reset()
 
+    // encode mock withdrawals
+    // wds
+    mocks.MockWithdrawals.EncodeIndex(0, buf)
+    wd1 = make([]byte, buf.Len())
+    copy(wd1, buf.Bytes())
+    buf.Reset()
+
+    mocks.MockWithdrawals.EncodeIndex(1, buf)
+    wd2 = make([]byte, buf.Len())
+    copy(wd2, buf.Bytes())
+    buf.Reset()
+
     // encode mock receipts for non-canonical blocks
     nonCanonicalBlockRcts.EncodeIndex(0, buf)
     nonCanonicalBlockRct1 = make([]byte, buf.Len())
@@ -152,6 +166,9 @@ func init() {
     rct4CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, rct4, multihash.KECCAK_256)
     rct5CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, rct5, multihash.KECCAK_256)
 
+    wd1CID, _ = ipld.RawdataToCid(ipld.MEthWithdrawal, wd1, multihash.KECCAK_256)
+    wd2CID, _ = ipld.RawdataToCid(ipld.MEthWithdrawal, wd2, multihash.KECCAK_256)
+
     // create raw receipts for non-canonical blocks
     nonCanonicalBlockRct1CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, nonCanonicalBlockRct1, multihash.KECCAK_256)
     nonCanonicalBlockRct2CID, _ = ipld.RawdataToCid(ipld.MEthTxReceipt, nonCanonicalBlockRct2, multihash.KECCAK_256)

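wd1 and wd2 above hold the consensus RLP encoding of each mock withdrawal, produced by Withdrawals.EncodeIndex, and the new wd1CID/wd2CID values hash those same bytes with ipld.RawdataToCid(ipld.MEthWithdrawal, ..., multihash.KECCAK_256). A small round-trip sketch of what those byte slices contain (not repo code):

package main

import (
    "bytes"
    "fmt"

    "github.com/ethereum/go-ethereum/common"
    "github.com/ethereum/go-ethereum/core/types"
    "github.com/ethereum/go-ethereum/rlp"
)

func main() {
    wds := types.Withdrawals{
        {Index: 0, Validator: 1, Address: common.Address{0xaa}, Amount: 1000000000},
    }

    // EncodeIndex writes the consensus RLP encoding of the i-th withdrawal,
    // the same bytes the test stores in wd1/wd2 and later hashes into a CID
    buf := new(bytes.Buffer)
    wds.EncodeIndex(0, buf)

    var decoded types.Withdrawal
    if err := rlp.DecodeBytes(buf.Bytes(), &decoded); err != nil {
        panic(err)
    }
    fmt.Printf("index=%d validator=%d amount=%d\n", decoded.Index, decoded.Validator, decoded.Amount)
}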
@@ -25,16 +25,6 @@ import (
     "github.com/cerc-io/plugeth-statediff/indexer/database/sql"
 )
 
-// ListContainsString used to check if a list of strings contains a particular string
-func ListContainsString(sss []string, s string) bool {
-    for _, str := range sss {
-        if s == str {
-            return true
-        }
-    }
-    return false
-}
-
 // DedupFile removes duplicates from the given file
 func DedupFile(filePath string) error {
     f, err := os.OpenFile(filePath, os.O_CREATE|os.O_RDONLY, os.ModePerm)
@@ -86,6 +76,7 @@ func TearDownDB(t *testing.T, db sql.Database) {
         `TRUNCATE eth.state_cids`,
         `TRUNCATE eth.storage_cids`,
         `TRUNCATE eth.log_cids`,
+        `TRUNCATE eth.withdrawal_cids`,
         `TRUNCATE eth_meta.watched_addresses`,
     }
     for _, stm := range statements {

@@ -22,6 +22,7 @@ func ClearDB(db *sqlx.DB) error {
         `TRUNCATE eth.state_cids`,
         `TRUNCATE eth.storage_cids`,
         `TRUNCATE eth.log_cids`,
+        `TRUNCATE eth.withdrawal_cids`,
         `TRUNCATE eth_meta.watched_addresses`,
     }
    for _, stm := range statements {