basefee is a big.Int; it won't always fit in an int64

This commit is contained in:
i-norden 2021-11-20 13:51:54 -06:00
parent b96c26fc97
commit b9a82f6350
16 changed files with 148 additions and 106 deletions

View File

@ -184,10 +184,10 @@ func (sdi *StateDiffIndexer) PushBlock(block *types.Block, receipts types.Receip
func (sdi *StateDiffIndexer) processHeader(tx *BatchTx, header *types.Header, headerNode node.Node, reward, td *big.Int) (string, error) { func (sdi *StateDiffIndexer) processHeader(tx *BatchTx, header *types.Header, headerNode node.Node, reward, td *big.Int) (string, error) {
tx.cacheIPLD(headerNode) tx.cacheIPLD(headerNode)
var baseFee *int64 var baseFee *string
if header.BaseFee != nil { if header.BaseFee != nil {
baseFee = new(int64) baseFee = new(string)
*baseFee = header.BaseFee.Int64() *baseFee = header.BaseFee.String()
} }
headerID := header.Hash().String() headerID := header.Hash().String()

View File

@ -191,10 +191,10 @@ func (sdi *StateDiffIndexer) PushBlock(block *types.Block, receipts types.Receip
func (sdi *StateDiffIndexer) processHeader(header *types.Header, headerNode node.Node, reward, td *big.Int) string { func (sdi *StateDiffIndexer) processHeader(header *types.Header, headerNode node.Node, reward, td *big.Int) string {
sdi.fileWriter.upsertIPLDNode(headerNode) sdi.fileWriter.upsertIPLDNode(headerNode)
var baseFee *int64 var baseFee *string
if header.BaseFee != nil { if header.BaseFee != nil {
baseFee = new(int64) baseFee = new(string)
*baseFee = header.BaseFee.Int64() *baseFee = header.BaseFee.String()
} }
headerID := header.Hash().String() headerID := header.Hash().String()
sdi.fileWriter.upsertHeaderCID(models.HeaderModel{ sdi.fileWriter.upsertHeaderCID(models.HeaderModel{

View File

@ -18,22 +18,21 @@ package file_test
import ( import (
"context" "context"
"errors"
"os" "os"
"testing" "testing"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/statediff/indexer/database/file"
"github.com/ethereum/go-ethereum/statediff/indexer/mocks"
"github.com/ipfs/go-cid" "github.com/ipfs/go-cid"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"github.com/multiformats/go-multihash" "github.com/multiformats/go-multihash"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/ethereum/go-ethereum/statediff/indexer/database/sql" "github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/statediff/indexer/database/file"
"github.com/ethereum/go-ethereum/statediff/indexer/database/sql/postgres" "github.com/ethereum/go-ethereum/statediff/indexer/database/sql/postgres"
"github.com/ethereum/go-ethereum/statediff/indexer/interfaces" "github.com/ethereum/go-ethereum/statediff/indexer/interfaces"
"github.com/ethereum/go-ethereum/statediff/indexer/ipld" "github.com/ethereum/go-ethereum/statediff/indexer/ipld"
"github.com/ethereum/go-ethereum/statediff/indexer/mocks"
"github.com/ethereum/go-ethereum/statediff/indexer/test_helpers" "github.com/ethereum/go-ethereum/statediff/indexer/test_helpers"
) )
@ -46,7 +45,10 @@ var (
func setupLegacy(t *testing.T) { func setupLegacy(t *testing.T) {
mockLegacyBlock = legacyData.MockBlock mockLegacyBlock = legacyData.MockBlock
legacyHeaderCID, _ = ipld.RawdataToCid(ipld.MEthHeader, legacyData.MockHeaderRlp, multihash.KECCAK_256) legacyHeaderCID, _ = ipld.RawdataToCid(ipld.MEthHeader, legacyData.MockHeaderRlp, multihash.KECCAK_256)
if _, err := os.Stat(file.TestConfig.FilePath); !errors.Is(err, os.ErrNotExist) {
err := os.Remove(file.TestConfig.FilePath)
require.NoError(t, err)
}
ind, err := file.NewStateDiffIndexer(context.Background(), legacyData.Config, file.TestConfig) ind, err := file.NewStateDiffIndexer(context.Background(), legacyData.Config, file.TestConfig)
require.NoError(t, err) require.NoError(t, err)
var tx interfaces.Batch var tx interfaces.Batch
@ -69,7 +71,7 @@ func setupLegacy(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
} }
test_helpers.ExpectEqual(t, tx.(*sql.BatchTx).BlockNumber, legacyData.BlockNumber.Uint64()) test_helpers.ExpectEqual(t, tx.(*file.BatchTx).BlockNumber, legacyData.BlockNumber.Uint64())
connStr := postgres.DefaultConfig.DbConnectionString() connStr := postgres.DefaultConfig.DbConnectionString()
@ -88,6 +90,7 @@ func dumpData(t *testing.T) {
} }
func tearDown(t *testing.T) { func tearDown(t *testing.T) {
file.TearDownDB(t, sqlxdb)
err := os.Remove(file.TestConfig.FilePath) err := os.Remove(file.TestConfig.FilePath)
require.NoError(t, err) require.NoError(t, err)
err = sqlxdb.Close() err = sqlxdb.Close()
@ -100,12 +103,12 @@ func expectTrue(t *testing.T, value bool) {
} }
} }
func TestFIleIndexerLegacy(t *testing.T) { func TestFileIndexerLegacy(t *testing.T) {
t.Run("Publish and index header IPLDs in a legacy tx", func(t *testing.T) { t.Run("Publish and index header IPLDs", func(t *testing.T) {
setupLegacy(t) setupLegacy(t)
dumpData(t) dumpData(t)
defer tearDown(t) defer tearDown(t)
pgStr := `SELECT cid, cast(td AS TEXT), cast(reward AS TEXT), block_hash, base_fee pgStr := `SELECT cid, td, reward, block_hash, base_fee
FROM eth.header_cids FROM eth.header_cids
WHERE block_number = $1` WHERE block_number = $1`
// check header was properly indexed // check header was properly indexed
@ -113,13 +116,11 @@ func TestFIleIndexerLegacy(t *testing.T) {
CID string CID string
TD string TD string
Reward string Reward string
BlockHash string `db:"block_hash"` BlockHash string `db:"block_hash"`
BaseFee *int64 `db:"base_fee"` BaseFee *string `db:"base_fee"`
} }
header := new(res) header := new(res)
err = sqlxdb.QueryRowx(pgStr, legacyData.BlockNumber.Uint64()).StructScan(header)
err = sqlxdb.QueryRow(pgStr, legacyData.BlockNumber.Uint64()).Scan(
&header.CID, &header.TD, &header.Reward, &header.BlockHash, &header.BaseFee)
require.NoError(t, err) require.NoError(t, err)
test_helpers.ExpectEqual(t, header.CID, legacyHeaderCID.String()) test_helpers.ExpectEqual(t, header.CID, legacyHeaderCID.String())

View File

@ -19,29 +19,29 @@ package file_test
import ( import (
"bytes" "bytes"
"context" "context"
"errors"
"fmt" "fmt"
"os" "os"
"testing" "testing"
"github.com/ethereum/go-ethereum/statediff/indexer/database/file" "github.com/ethereum/go-ethereum/common"
"github.com/jmoiron/sqlx" "github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/statediff/indexer/models"
"github.com/ethereum/go-ethereum/statediff/indexer/shared"
"github.com/ipfs/go-cid" "github.com/ipfs/go-cid"
blockstore "github.com/ipfs/go-ipfs-blockstore" blockstore "github.com/ipfs/go-ipfs-blockstore"
dshelp "github.com/ipfs/go-ipfs-ds-help" dshelp "github.com/ipfs/go-ipfs-ds-help"
"github.com/jmoiron/sqlx"
"github.com/multiformats/go-multihash" "github.com/multiformats/go-multihash"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/statediff/indexer/database/file"
"github.com/ethereum/go-ethereum/statediff/indexer/database/sql"
"github.com/ethereum/go-ethereum/statediff/indexer/database/sql/postgres" "github.com/ethereum/go-ethereum/statediff/indexer/database/sql/postgres"
"github.com/ethereum/go-ethereum/statediff/indexer/interfaces" "github.com/ethereum/go-ethereum/statediff/indexer/interfaces"
"github.com/ethereum/go-ethereum/statediff/indexer/ipld" "github.com/ethereum/go-ethereum/statediff/indexer/ipld"
"github.com/ethereum/go-ethereum/statediff/indexer/mocks" "github.com/ethereum/go-ethereum/statediff/indexer/mocks"
"github.com/ethereum/go-ethereum/statediff/indexer/models"
"github.com/ethereum/go-ethereum/statediff/indexer/shared"
"github.com/ethereum/go-ethereum/statediff/indexer/test_helpers" "github.com/ethereum/go-ethereum/statediff/indexer/test_helpers"
) )
@ -135,7 +135,11 @@ func init() {
} }
func setup(t *testing.T) { func setup(t *testing.T) {
ind, err := file.NewStateDiffIndexer(context.Background(), legacyData.Config, file.TestConfig) if _, err := os.Stat(file.TestConfig.FilePath); !errors.Is(err, os.ErrNotExist) {
err := os.Remove(file.TestConfig.FilePath)
require.NoError(t, err)
}
ind, err = file.NewStateDiffIndexer(context.Background(), mocks.TestConfig, file.TestConfig)
require.NoError(t, err) require.NoError(t, err)
var tx interfaces.Batch var tx interfaces.Batch
tx, err = ind.PushBlock( tx, err = ind.PushBlock(
@ -158,7 +162,7 @@ func setup(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
} }
test_helpers.ExpectEqual(t, tx.(*sql.BatchTx).BlockNumber, mocks.BlockNumber.Uint64()) test_helpers.ExpectEqual(t, tx.(*file.BatchTx).BlockNumber, mocks.BlockNumber.Uint64())
connStr := postgres.DefaultConfig.DbConnectionString() connStr := postgres.DefaultConfig.DbConnectionString()
@ -168,7 +172,7 @@ func setup(t *testing.T) {
} }
} }
func TestSQLXIndexer(t *testing.T) { func TestFileIndexer(t *testing.T) {
t.Run("Publish and index header IPLDs in a single tx", func(t *testing.T) { t.Run("Publish and index header IPLDs in a single tx", func(t *testing.T) {
setup(t) setup(t)
dumpData(t) dumpData(t)
@ -181,18 +185,19 @@ func TestSQLXIndexer(t *testing.T) {
CID string CID string
TD string TD string
Reward string Reward string
BlockHash string `db:"block_hash"` BlockHash string `db:"block_hash"`
BaseFee *int64 `db:"base_fee"` BaseFee *string `db:"base_fee"`
} }
header := new(res) header := new(res)
err = sqlxdb.QueryRowx(pgStr, mocks.BlockNumber.Uint64()).StructScan(header) err = sqlxdb.QueryRowx(pgStr, mocks.BlockNumber.Uint64()).StructScan(header)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
test_helpers.ExpectEqual(t, header.CID, headerCID.String()) test_helpers.ExpectEqual(t, header.CID, headerCID.String())
test_helpers.ExpectEqual(t, header.TD, mocks.MockBlock.Difficulty().String()) test_helpers.ExpectEqual(t, header.TD, mocks.MockBlock.Difficulty().String())
test_helpers.ExpectEqual(t, header.Reward, "2000000000000021250") test_helpers.ExpectEqual(t, header.Reward, "2000000000000021250")
test_helpers.ExpectEqual(t, *header.BaseFee, mocks.MockHeader.BaseFee.Int64()) test_helpers.ExpectEqual(t, *header.BaseFee, mocks.MockHeader.BaseFee.String())
dc, err := cid.Decode(header.CID) dc, err := cid.Decode(header.CID)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -206,7 +211,6 @@ func TestSQLXIndexer(t *testing.T) {
} }
test_helpers.ExpectEqual(t, data, mocks.MockHeaderRlp) test_helpers.ExpectEqual(t, data, mocks.MockHeaderRlp)
}) })
t.Run("Publish and index transaction IPLDs in a single tx", func(t *testing.T) { t.Run("Publish and index transaction IPLDs in a single tx", func(t *testing.T) {
setup(t) setup(t)
dumpData(t) dumpData(t)

View File

@ -1,28 +1,64 @@
package file package file
import "io" import (
"testing"
type MemWriter struct { "github.com/jmoiron/sqlx"
bytes []byte )
}
func NewMemWriter() io.WriteCloser { // TearDownDB is used to tear down the watcher dbs after tests
return &MemWriter{} func TearDownDB(t *testing.T, db *sqlx.DB) {
} tx, err := db.Begin()
if err != nil {
t.Fatal(err)
}
// Write satisfies io.WriteCloser _, err = tx.Exec(`DELETE FROM eth.header_cids`)
func (mw *MemWriter) Write(b []byte) (int, error) { if err != nil {
mw.bytes = append(mw.bytes, b...) t.Fatal(err)
return len(b), nil }
} _, err = tx.Exec(`DELETE FROM eth.uncle_cids`)
if err != nil {
// Close satisfies io.WriteCloser t.Fatal(err)
func (mw *MemWriter) Close() error { }
mw.bytes = []byte{} _, err = tx.Exec(`DELETE FROM eth.transaction_cids`)
return nil if err != nil {
} t.Fatal(err)
}
// ReadAll returns all the bytes written to the memory writer _, err = tx.Exec(`DELETE FROM eth.receipt_cids`)
func (mw *MemWriter) ReadAll() []byte { if err != nil {
return mw.bytes t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM eth.state_cids`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM eth.storage_cids`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM eth.state_accounts`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM eth.access_list_elements`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM eth.log_cids`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM blocks`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(`DELETE FROM nodes`)
if err != nil {
t.Fatal(err)
}
err = tx.Commit()
if err != nil {
t.Fatal(err)
}
} }

View File

@ -121,21 +121,21 @@ const (
nodeInsert = "INSERT INTO nodes (genesis_block, network_id, node_id, client_name, chain_id) VALUES " + nodeInsert = "INSERT INTO nodes (genesis_block, network_id, node_id, client_name, chain_id) VALUES " +
"('%s', '%s', '%s', '%s', %d);\n" "('%s', '%s', '%s', '%s', %d);\n"
ipldInsert = "INSERT INTO public.blocks (key, data) VALUES ('%s', '%x');\n" ipldInsert = "INSERT INTO public.blocks (key, data) VALUES ('%s', '\\x%x');\n"
headerInsert = "INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td, node_id, reward, " + headerInsert = "INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td, node_id, reward, " +
"state_root, tx_root, receipt_root, uncle_root, bloom, timestamp, mh_key, times_validated, base_fee) VALUES " + "state_root, tx_root, receipt_root, uncle_root, bloom, timestamp, mh_key, times_validated, base_fee) VALUES " +
"('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%x', %d, '%s', %d, %d);\n" "('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '\\x%x', %d, '%s', %d, %s);\n"
headerInsertWithoutBaseFee = "INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td, node_id, " + headerInsertWithoutBaseFee = "INSERT INTO eth.header_cids (block_number, block_hash, parent_hash, cid, td, node_id, " +
"reward, state_root, tx_root, receipt_root, uncle_root, bloom, timestamp, mh_key, times_validated, base_fee) VALUES " + "reward, state_root, tx_root, receipt_root, uncle_root, bloom, timestamp, mh_key, times_validated, base_fee) VALUES " +
"('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%x', %d, '%s', %d, NULL);\n" "('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '\\x%x', %d, '%s', %d, NULL);\n"
uncleInsert = "INSERT INTO eth.uncle_cids (block_hash, header_id, parent_hash, cid, reward, mh_key) VALUES " + uncleInsert = "INSERT INTO eth.uncle_cids (block_hash, header_id, parent_hash, cid, reward, mh_key) VALUES " +
"('%s', '%s', '%s', '%s', '%s', '%s');\n" "('%s', '%s', '%s', '%s', '%s', '%s');\n"
txInsert = "INSERT INTO eth.transaction_cids (header_id, tx_hash, cid, dst, src, index, mh_key, tx_data, tx_type) " + txInsert = "INSERT INTO eth.transaction_cids (header_id, tx_hash, cid, dst, src, index, mh_key, tx_data, tx_type) " +
"VALUES ('%s', '%s', '%s', '%s', '%s', %d, '%s', '%x', %d);\n" "VALUES ('%s', '%s', '%s', '%s', '%s', %d, '%s', '\\x%x', %d);\n"
alInsert = "INSERT INTO eth.access_list_elements (tx_id, index, address, storage_keys) VALUES ('%s', %d, '%s', '%s');\n" alInsert = "INSERT INTO eth.access_list_elements (tx_id, index, address, storage_keys) VALUES ('%s', %d, '%s', '%s');\n"
@ -143,16 +143,16 @@ const (
"post_status, log_root) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', %d, '%s');\n" "post_status, log_root) VALUES ('%s', '%s', '%s', '%s', '%s', '%s', %d, '%s');\n"
logInsert = "INSERT INTO eth.log_cids (leaf_cid, leaf_mh_key, rct_id, address, index, topic0, topic1, topic2, " + logInsert = "INSERT INTO eth.log_cids (leaf_cid, leaf_mh_key, rct_id, address, index, topic0, topic1, topic2, " +
"topic3, log_data) VALUES ('%s', '%s', '%s', '%s', %d, '%s', '%s', '%s', '%s', '%x');\n" "topic3, log_data) VALUES ('%s', '%s', '%s', '%s', %d, '%s', '%s', '%s', '%s', '\\x%x');\n"
stateInsert = "INSERT INTO eth.state_cids (header_id, state_leaf_key, cid, state_path, node_type, diff, mh_key) " + stateInsert = "INSERT INTO eth.state_cids (header_id, state_leaf_key, cid, state_path, node_type, diff, mh_key) " +
"VALUES ('%s', '%s', '%s', '%x', %d, %t, '%s');\n" "VALUES ('%s', '%s', '%s', '\\x%x', %d, %t, '%s');\n"
accountInsert = "INSERT INTO eth.state_accounts (header_id, state_path, balance, nonce, code_hash, storage_root) " + accountInsert = "INSERT INTO eth.state_accounts (header_id, state_path, balance, nonce, code_hash, storage_root) " +
"VALUES ('%s', '%x', '%s', %d, '%x', '%s');\n" "VALUES ('%s', '\\x%x', '%s', %d, '\\x%x', '%s');\n"
storageInsert = "INSERT INTO eth.storage_cids (header_id, state_path, storage_leaf_key, cid, storage_path, " + storageInsert = "INSERT INTO eth.storage_cids (header_id, state_path, storage_leaf_key, cid, storage_path, " +
"node_type, diff, mh_key) VALUES ('%s', '%x', '%s', '%s', '%x', %d, %t, '%s');\n" "node_type, diff, mh_key) VALUES ('%s', '\\x%x', '%s', '%s', '\\x%x', %d, %t, '%s');\n"
) )
func (sqw *SQLWriter) upsertNode(node nodeinfo.Info) { func (sqw *SQLWriter) upsertNode(node nodeinfo.Info) {
@ -199,7 +199,7 @@ func (sqw *SQLWriter) upsertHeaderCID(header models.HeaderModel) {
} else { } else {
stmt = fmt.Sprintf(headerInsert, header.BlockNumber, header.BlockHash, header.ParentHash, header.CID, stmt = fmt.Sprintf(headerInsert, header.BlockNumber, header.BlockHash, header.ParentHash, header.CID,
header.TotalDifficulty, header.NodeID, header.Reward, header.StateRoot, header.TxRoot, header.TotalDifficulty, header.NodeID, header.Reward, header.StateRoot, header.TxRoot,
header.RctRoot, header.UncleRoot, header.Bloom, header.Timestamp, header.MhKey, 1, header.BaseFee) header.RctRoot, header.UncleRoot, header.Bloom, header.Timestamp, header.MhKey, 1, *header.BaseFee)
} }
sqw.stmts <- []byte(stmt) sqw.stmts <- []byte(stmt)
indexerMetrics.blocks.Inc(1) indexerMetrics.blocks.Inc(1)

View File

@ -235,10 +235,10 @@ func (sdi *StateDiffIndexer) PushBlock(block *types.Block, receipts types.Receip
func (sdi *StateDiffIndexer) processHeader(tx *BatchTx, header *types.Header, headerNode node.Node, reward, td *big.Int) (string, error) { func (sdi *StateDiffIndexer) processHeader(tx *BatchTx, header *types.Header, headerNode node.Node, reward, td *big.Int) (string, error) {
tx.cacheIPLD(headerNode) tx.cacheIPLD(headerNode)
var baseFee *int64 var baseFee *string
if header.BaseFee != nil { if header.BaseFee != nil {
baseFee = new(int64) baseFee = new(string)
*baseFee = header.BaseFee.Int64() *baseFee = header.BaseFee.String()
} }
headerID := header.Hash().String() headerID := header.Hash().String()
// index header // index header

View File

@ -60,7 +60,7 @@ func setupLegacyPGX(t *testing.T) {
} }
func TestPGXIndexerLegacy(t *testing.T) { func TestPGXIndexerLegacy(t *testing.T) {
t.Run("Publish and index header IPLDs in a legacy tx", func(t *testing.T) { t.Run("Publish and index header IPLDs", func(t *testing.T) {
setupLegacyPGX(t) setupLegacyPGX(t)
defer tearDown(t) defer tearDown(t)
pgStr := `SELECT cid, cast(td AS TEXT), cast(reward AS TEXT), block_hash, base_fee pgStr := `SELECT cid, cast(td AS TEXT), cast(reward AS TEXT), block_hash, base_fee

View File

@ -150,7 +150,7 @@ func TestPGXIndexer(t *testing.T) {
t.Run("Publish and index header IPLDs in a single tx", func(t *testing.T) { t.Run("Publish and index header IPLDs in a single tx", func(t *testing.T) {
setupPGX(t) setupPGX(t)
defer tearDown(t) defer tearDown(t)
pgStr := `SELECT cid, cast(td AS TEXT), cast(reward AS TEXT), block_hash, base_fee pgStr := `SELECT cid, cast(td AS TEXT), cast(reward AS TEXT), block_hash, cast(base_fee AS TEXT)
FROM eth.header_cids FROM eth.header_cids
WHERE block_number = $1` WHERE block_number = $1`
// check header was properly indexed // check header was properly indexed
@ -158,8 +158,8 @@ func TestPGXIndexer(t *testing.T) {
CID string CID string
TD string TD string
Reward string Reward string
BlockHash string `db:"block_hash"` BlockHash string `db:"block_hash"`
BaseFee *int64 `db:"base_fee"` BaseFee *string `db:"base_fee"`
} }
header := new(res) header := new(res)
err = db.QueryRow(context.Background(), pgStr, mocks.BlockNumber.Uint64()).Scan( err = db.QueryRow(context.Background(), pgStr, mocks.BlockNumber.Uint64()).Scan(
@ -174,7 +174,7 @@ func TestPGXIndexer(t *testing.T) {
test_helpers.ExpectEqual(t, header.CID, headerCID.String()) test_helpers.ExpectEqual(t, header.CID, headerCID.String())
test_helpers.ExpectEqual(t, header.TD, mocks.MockBlock.Difficulty().String()) test_helpers.ExpectEqual(t, header.TD, mocks.MockBlock.Difficulty().String())
test_helpers.ExpectEqual(t, header.Reward, "2000000000000021250") test_helpers.ExpectEqual(t, header.Reward, "2000000000000021250")
test_helpers.ExpectEqual(t, *header.BaseFee, mocks.MockHeader.BaseFee.Int64()) test_helpers.ExpectEqual(t, *header.BaseFee, mocks.MockHeader.BaseFee.String())
dc, err := cid.Decode(header.CID) dc, err := cid.Decode(header.CID)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)

View File

@ -47,10 +47,10 @@ func TestPostgresPGX(t *testing.T) {
if err != nil { if err != nil {
t.Fatalf("failed to connect to db with connection string: %s err: %v", pgConfig.ConnString(), err) t.Fatalf("failed to connect to db with connection string: %s err: %v", pgConfig.ConnString(), err)
} }
defer dbPool.Close()
if dbPool == nil { if dbPool == nil {
t.Fatal("DB pool is nil") t.Fatal("DB pool is nil")
} }
dbPool.Close()
}) })
t.Run("serializes big.Int to db", func(t *testing.T) { t.Run("serializes big.Int to db", func(t *testing.T) {
@ -111,8 +111,7 @@ func TestPostgresPGX(t *testing.T) {
badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100)) badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
badInfo := node.Info{GenesisBlock: badHash, NetworkID: "1", ID: "x123", ClientName: "geth"} badInfo := node.Info{GenesisBlock: badHash, NetworkID: "1", ID: "x123", ClientName: "geth"}
d, err := postgres.NewPGXDriver(ctx, postgres.DefaultConfig, badInfo) _, err := postgres.NewPGXDriver(ctx, postgres.DefaultConfig, badInfo)
defer d.Close()
if err == nil { if err == nil {
t.Fatal("Expected an error") t.Fatal("Expected an error")
} }

View File

@ -109,15 +109,11 @@ func TestPostgresSQLX(t *testing.T) {
badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100)) badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100))
badInfo := node.Info{GenesisBlock: badHash, NetworkID: "1", ID: "x123", ClientName: "geth"} badInfo := node.Info{GenesisBlock: badHash, NetworkID: "1", ID: "x123", ClientName: "geth"}
d, err := postgres.NewSQLXDriver(ctx, postgres.DefaultConfig, badInfo) _, err := postgres.NewSQLXDriver(ctx, postgres.DefaultConfig, badInfo)
if err == nil { if err == nil {
t.Fatal("Expected an error") t.Fatal("Expected an error")
} }
expectContainsSubstring(t, err.Error(), postgres.SettingNodeFailedMsg) expectContainsSubstring(t, err.Error(), postgres.SettingNodeFailedMsg)
err = d.Close()
if err != nil {
t.Fatal(err)
}
}) })
} }

View File

@ -70,7 +70,7 @@ func setupLegacySQLX(t *testing.T) {
} }
func TestSQLXIndexerLegacy(t *testing.T) { func TestSQLXIndexerLegacy(t *testing.T) {
t.Run("Publish and index header IPLDs in a legacy tx", func(t *testing.T) { t.Run("Publish and index header IPLDs", func(t *testing.T) {
setupLegacySQLX(t) setupLegacySQLX(t)
defer tearDown(t) defer tearDown(t)
pgStr := `SELECT cid, td, reward, block_hash, base_fee pgStr := `SELECT cid, td, reward, block_hash, base_fee

View File

@ -185,8 +185,8 @@ func TestSQLXIndexer(t *testing.T) {
CID string CID string
TD string TD string
Reward string Reward string
BlockHash string `db:"block_hash"` BlockHash string `db:"block_hash"`
BaseFee *int64 `db:"base_fee"` BaseFee *string `db:"base_fee"`
} }
header := new(res) header := new(res)
err = db.QueryRow(context.Background(), pgStr, mocks.BlockNumber.Uint64()).(*sqlx.Row).StructScan(header) err = db.QueryRow(context.Background(), pgStr, mocks.BlockNumber.Uint64()).(*sqlx.Row).StructScan(header)
@ -196,7 +196,7 @@ func TestSQLXIndexer(t *testing.T) {
test_helpers.ExpectEqual(t, header.CID, headerCID.String()) test_helpers.ExpectEqual(t, header.CID, headerCID.String())
test_helpers.ExpectEqual(t, header.TD, mocks.MockBlock.Difficulty().String()) test_helpers.ExpectEqual(t, header.TD, mocks.MockBlock.Difficulty().String())
test_helpers.ExpectEqual(t, header.Reward, "2000000000000021250") test_helpers.ExpectEqual(t, header.Reward, "2000000000000021250")
test_helpers.ExpectEqual(t, *header.BaseFee, mocks.MockHeader.BaseFee.Int64()) test_helpers.ExpectEqual(t, *header.BaseFee, mocks.MockHeader.BaseFee.String())
dc, err := cid.Decode(header.CID) dc, err := cid.Decode(header.CID)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)

View File

@ -33,6 +33,10 @@ func TearDownDB(t *testing.T, db Database) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
_, err = tx.Exec(ctx, `DELETE FROM eth.uncle_cids`)
if err != nil {
t.Fatal(err)
}
_, err = tx.Exec(ctx, `DELETE FROM eth.transaction_cids`) _, err = tx.Exec(ctx, `DELETE FROM eth.transaction_cids`)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -65,6 +69,10 @@ func TearDownDB(t *testing.T, db Database) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
_, err = tx.Exec(ctx, `DELETE FROM nodes`)
if err != nil {
t.Fatal(err)
}
err = tx.Commit(ctx) err = tx.Commit(ctx)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)

View File

@ -22,18 +22,16 @@ import (
"crypto/rand" "crypto/rand"
"math/big" "math/big"
"github.com/ethereum/go-ethereum/statediff/indexer/models"
"github.com/ethereum/go-ethereum/trie"
"github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/log" "github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/params" "github.com/ethereum/go-ethereum/params"
"github.com/ethereum/go-ethereum/rlp" "github.com/ethereum/go-ethereum/rlp"
"github.com/ethereum/go-ethereum/statediff/indexer/models"
"github.com/ethereum/go-ethereum/statediff/test_helpers" "github.com/ethereum/go-ethereum/statediff/test_helpers"
sdtypes "github.com/ethereum/go-ethereum/statediff/types" sdtypes "github.com/ethereum/go-ethereum/statediff/types"
"github.com/ethereum/go-ethereum/trie"
) )
// Test variables // Test variables

View File

@ -26,22 +26,22 @@ type IPLDModel struct {
// HeaderModel is the db model for eth.header_cids // HeaderModel is the db model for eth.header_cids
type HeaderModel struct { type HeaderModel struct {
BlockNumber string `db:"block_number"` BlockNumber string `db:"block_number"`
BlockHash string `db:"block_hash"` BlockHash string `db:"block_hash"`
ParentHash string `db:"parent_hash"` ParentHash string `db:"parent_hash"`
CID string `db:"cid"` CID string `db:"cid"`
MhKey string `db:"mh_key"` MhKey string `db:"mh_key"`
TotalDifficulty string `db:"td"` TotalDifficulty string `db:"td"`
NodeID string `db:"node_id"` NodeID string `db:"node_id"`
Reward string `db:"reward"` Reward string `db:"reward"`
StateRoot string `db:"state_root"` StateRoot string `db:"state_root"`
UncleRoot string `db:"uncle_root"` UncleRoot string `db:"uncle_root"`
TxRoot string `db:"tx_root"` TxRoot string `db:"tx_root"`
RctRoot string `db:"receipt_root"` RctRoot string `db:"receipt_root"`
Bloom []byte `db:"bloom"` Bloom []byte `db:"bloom"`
Timestamp uint64 `db:"timestamp"` Timestamp uint64 `db:"timestamp"`
TimesValidated int64 `db:"times_validated"` TimesValidated int64 `db:"times_validated"`
BaseFee *int64 `db:"base_fee"` BaseFee *string `db:"base_fee"`
} }
// UncleModel is the db model for eth.uncle_cids // UncleModel is the db model for eth.uncle_cids